Compare commits


50 Commits

Author SHA1 Message Date
Ville Skyttä
0bd8f41173 Fix state class info in warn_dip/negative docstrings 2025-11-27 22:26:56 +02:00
Ville Skyttä
74dec7d40f Use state class constants in dip/negative warning messages 2025-11-27 22:24:29 +02:00
Ville Skyttä
ecdc196119 Clarify previous state in dip warning 2025-11-27 22:20:16 +02:00
StaleLoafOfBread
f1ee0e4ac9 Add support for gallons per day as a unit of volume flow rate (#157394) 2025-11-27 20:42:16 +01:00
Joakim Plate
5f522e5afa Fix cancel propagation in update coordinator and config entry (#153504) 2025-11-27 19:48:45 +01:00
Thomas55555
4f6624d0aa Fix strings in Google Air Quality (#157297) 2025-11-27 19:26:33 +01:00
epenet
70990645a7 Mark config-flow as done in SFR Box IQS (#157439) 2025-11-27 19:14:13 +01:00
Andrew Jackson
2f7d74ff62 Add icons to transmission entities (#157436) 2025-11-27 18:38:32 +01:00
epenet
885667832b Add initial IQS to sfr_box (#155419) 2025-11-27 18:36:51 +01:00
Petro31
4646929987 Avoid custom template platform deprecations (#157415) 2025-11-27 18:06:29 +01:00
Petro31
010aea952c Reload templates when labs flag automation.new_triggers_conditions is set (#157368) 2025-11-27 18:05:33 +01:00
Bram Kragten
563678dc47 Update frontend to 20251127.0 (#157431) 2025-11-27 18:05:18 +01:00
epenet
a48f01f213 Raise UpdateFailed if API returns None in sfr_box (#157434) 2025-11-27 18:01:56 +01:00
Andrew Jackson
08b758b0d2 Add device info and parallel_updates to Transmission (#157423) 2025-11-27 17:37:27 +01:00
Allen Porter
4306fbea52 Fix regression in roborock image entity naming (#157432) 2025-11-27 17:36:18 +01:00
Robert Resch
6f4c479f8f Use same cosign version in build workflow (#157365) 2025-11-27 17:13:04 +01:00
Shay Levy
1d9c06264e Fix Shelly support for button5 trigger (#157422) 2025-11-27 16:38:45 +01:00
epenet
d045ecaf13 Add parallel_updates to SFR Box (#157426) 2025-11-27 16:04:25 +01:00
Markus Jacobsen
f7c41e694c Add media content id attribute to Bang & Olufsen (#156597) 2025-11-27 15:53:43 +01:00
Kamil Breguła
9ee7ed5cdb Fix MAC address mix-ups between WLED devices (#155491) 2025-11-27 15:10:32 +01:00
Co-authored-by: mik-laj <12058428+mik-laj@users.noreply.github.com>
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
Denis Shulyaka
83c4e2abc9 Fix Anthropic init with incorrect model (#157421) 2025-11-27 14:16:46 +01:00
Erik Montnemery
a7dbf551a3 Add climate started_cooling and started_drying triggers (#156945) 2025-11-27 12:41:08 +01:00
Petro31
0b2bb9f6bf Modernize template binary sensor (#157279) 2025-11-27 12:28:16 +01:00
tan-lawrence
0769163b67 Use "medium" instead of "med" for the medium fan mode in Coolmaster (#157253) 2025-11-27 12:27:49 +01:00
Robert Resch
2bb51e1146 Reduce Devcontainer docker layers (#157412) 2025-11-27 12:27:18 +01:00
Paulus Schoutsen
d2248d282c Default conversation agent to store tool calls in chat log (#157377) 2025-11-27 12:27:03 +01:00
Jan Čermák
8fe79a88ca Fix state classes of Ecowitt rain sensors (#157409) 2025-11-27 12:24:28 +01:00
Jaap Pieroen
7a328539b2 Bugfix: Essent remove average gas price today (#157317) 2025-11-27 12:24:07 +01:00
abelyliu
ec69efee4d Fix parsing of Tuya electricity RAW values (#157039) 2025-11-27 12:23:33 +01:00
Shay Levy
dbcde549d4 Update Shelly coordinator coverage to 100% (#157380) 2025-11-27 12:22:19 +01:00
Michael
988355e138 Add tests for the switch platform to the AdGuard Home integration (#157105) 2025-11-27 12:21:23 +01:00
victorigualada
7711eac607 Return early when setting cloud ai_task and conversation and not logged in to cloud (#157402) 2025-11-27 12:20:42 +01:00
Denis Shulyaka
32fe53cceb Add anthropic model to the device info (#157413) 2025-11-27 12:16:05 +01:00
Andrew Jackson
3a65d3c0dc Add tests to Transmission (#157355) 2025-11-27 12:15:10 +01:00
epenet
7fe26223ac Bump renault-api to 0.5.1 (#157411) 2025-11-27 12:06:57 +01:00
victorigualada
7e8496afb2 Bump hass-nabucasa from 1.6.1 to 1.6.2 (#157405) 2025-11-27 11:40:50 +01:00
Paulus Schoutsen
2ec5190243 Install requirements_test_all in dev (#157392) 2025-11-27 10:30:50 +01:00
Erik Montnemery
a706db8fdb Minor polish of cover trigger tests (#157397) 2025-11-27 09:57:03 +01:00
starkillerOG
a00923c48b Bump reolink-aio to 0.16.6 (#157399) 2025-11-27 09:53:25 +01:00
Sarah Seidman
7480d59f0f Normalize input for Droplet pairing code (#157361) 2025-11-27 08:36:30 +01:00
Erik Montnemery
4c8d9ed401 Adjust type hints in sensor group (#157373) 2025-11-27 08:34:16 +01:00
Lukas
eef10c59db Pooldose bump api 0.8.0 (new) (#157381) 2025-11-27 08:33:32 +01:00
dependabot[bot]
a1a1f8dd77 Bump docker/metadata-action from 5.5.1 to 5.9.0 (#157395) 2025-11-27 07:26:58 +01:00
dependabot[bot]
c75a5c5151 Bump docker/setup-buildx-action from 3.5.0 to 3.11.1 (#157396) 2025-11-27 07:25:16 +01:00
Allen Porter
cdaaa2bd8f Update fitbit to use new asyncio client library for device list (#157308) 2025-11-27 00:23:49 -05:00
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Allen Porter
bd84dac8fb Update roborock test typing (#157370) 2025-11-27 00:21:48 -05:00
Allen Porter
42cbeca5b0 Remove old roborock map storage (#157379) 2025-11-27 00:21:04 -05:00
Allen Porter
ad0a498d10 Bump python-roborock to 3.8.1 (#157376) 2025-11-26 16:12:19 -08:00
Jan Bouwhuis
973405822b Move translatable URL out of strings.json for knx integration (#155244) 2025-11-26 23:09:59 +01:00
Franck Nijhof
b883d2f519 Bump version to 2026.1.0dev0 2025-11-26 17:15:29 +00:00
70 changed files with 2453 additions and 448 deletions

View File

@@ -190,7 +190,8 @@ jobs:
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Install Cosign
- &install_cosign
name: Install Cosign
uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0
with:
cosign-release: "v2.5.3"
@@ -353,10 +354,7 @@ jobs:
matrix:
registry: ["ghcr.io/home-assistant", "docker.io/homeassistant"]
steps:
- name: Install Cosign
uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0
with:
cosign-release: "v2.2.3"
- *install_cosign
- name: Login to DockerHub
if: matrix.registry == 'docker.io/homeassistant'
@@ -393,7 +391,7 @@ jobs:
# 2025.12.0.dev202511250240 -> tags: 2025.12.0.dev202511250240, dev
- name: Generate Docker metadata
id: meta
uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81 # v5.5.1
uses: docker/metadata-action@318604b99e75e41977312d83839a89be02ca4893 # v5.9.0
with:
images: ${{ matrix.registry }}/home-assistant
sep-tags: ","
@@ -407,7 +405,7 @@ jobs:
type=semver,pattern={{major}}.{{minor}},value=${{ needs.init.outputs.version }},enable=${{ !contains(needs.init.outputs.version, 'd') && !contains(needs.init.outputs.version, 'b') }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@aa33708b10e362ff993539393ff100fa93ed6a27 # v3.7.1
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.7.1
- name: Copy architecture images to DockerHub
if: matrix.registry == 'docker.io/homeassistant'

View File

@@ -40,7 +40,7 @@ env:
CACHE_VERSION: 2
UV_CACHE_VERSION: 1
MYPY_CACHE_VERSION: 1
HA_SHORT_VERSION: "2025.12"
HA_SHORT_VERSION: "2026.1"
DEFAULT_PYTHON: "3.13"
ALL_PYTHON_VERSIONS: "['3.13', '3.14']"
# 10.3 is the oldest supported version

View File

@@ -35,25 +35,22 @@ COPY --from=ghcr.io/astral-sh/uv:latest /uv /usr/local/bin/uv
USER vscode
COPY .python-version ./
RUN uv python install
ENV VIRTUAL_ENV="/home/vscode/.local/ha-venv"
RUN uv venv $VIRTUAL_ENV
RUN --mount=type=bind,source=.python-version,target=.python-version \
uv python install \
&& uv venv $VIRTUAL_ENV
ENV PATH="$VIRTUAL_ENV/bin:$PATH"
WORKDIR /tmp
# Setup hass-release
RUN git clone --depth 1 https://github.com/home-assistant/hass-release ~/hass-release \
&& uv pip install -e ~/hass-release/
# Install Python dependencies from requirements
COPY requirements.txt ./
COPY homeassistant/package_constraints.txt homeassistant/package_constraints.txt
RUN uv pip install -r requirements.txt
COPY requirements_test.txt requirements_test_pre_commit.txt ./
RUN uv pip install -r requirements_test.txt
RUN --mount=type=bind,source=requirements.txt,target=requirements.txt \
--mount=type=bind,source=homeassistant/package_constraints.txt,target=homeassistant/package_constraints.txt \
--mount=type=bind,source=requirements_test.txt,target=requirements_test.txt \
--mount=type=bind,source=requirements_test_pre_commit.txt,target=requirements_test_pre_commit.txt \
uv pip install -r requirements.txt -r requirements_test.txt
WORKDIR /workspaces

View File

@@ -583,7 +583,7 @@ class AnthropicBaseLLMEntity(Entity):
identifiers={(DOMAIN, subentry.subentry_id)},
name=subentry.title,
manufacturer="Anthropic",
model="Claude",
model=subentry.data.get(CONF_CHAT_MODEL, DEFAULT[CONF_CHAT_MODEL]),
entry_type=dr.DeviceEntryType.SERVICE,
)

View File

@@ -17,8 +17,12 @@ from homeassistant.components.media_player import (
class BangOlufsenSource:
"""Class used for associating device source ids with friendly names. May not include all sources."""
DEEZER: Final[Source] = Source(name="Deezer", id="deezer")
LINE_IN: Final[Source] = Source(name="Line-In", id="lineIn")
NET_RADIO: Final[Source] = Source(name="B&O Radio", id="netRadio")
SPDIF: Final[Source] = Source(name="Optical", id="spdif")
TIDAL: Final[Source] = Source(name="Tidal", id="tidal")
UNKNOWN: Final[Source] = Source(name="Unknown Source", id="unknown")
URI_STREAMER: Final[Source] = Source(name="Audio Streamer", id="uriStreamer")
@@ -78,6 +82,16 @@ class BangOlufsenModel(StrEnum):
BEOREMOTE_ONE = "Beoremote One"
class BangOlufsenAttribute(StrEnum):
"""Enum for extra_state_attribute keys."""
BEOLINK = "beolink"
BEOLINK_PEERS = "peers"
BEOLINK_SELF = "self"
BEOLINK_LEADER = "leader"
BEOLINK_LISTENERS = "listeners"
# Physical "buttons" on devices
class BangOlufsenButtons(StrEnum):
"""Enum for device buttons."""

View File

@@ -82,6 +82,7 @@ from .const import (
FALLBACK_SOURCES,
MANUFACTURER,
VALID_MEDIA_TYPES,
BangOlufsenAttribute,
BangOlufsenMediaType,
BangOlufsenSource,
WebsocketNotification,
@@ -224,7 +225,8 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
# Beolink compatible sources
self._beolink_sources: dict[str, bool] = {}
self._remote_leader: BeolinkLeader | None = None
# Extra state attributes for showing Beolink: peer(s), listener(s), leader and self
# Extra state attributes:
# Beolink: peer(s), listener(s), leader and self
self._beolink_attributes: dict[str, dict[str, dict[str, str]]] = {}
async def async_added_to_hass(self) -> None:
@@ -436,7 +438,10 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
await self._async_update_beolink()
async def _async_update_beolink(self) -> None:
"""Update the current Beolink leader, listeners, peers and self."""
"""Update the current Beolink leader, listeners, peers and self.
Updates Home Assistant state.
"""
self._beolink_attributes = {}
@@ -445,18 +450,24 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
# Add Beolink self
self._beolink_attributes = {
"beolink": {"self": {self.device_entry.name: self._beolink_jid}}
BangOlufsenAttribute.BEOLINK: {
BangOlufsenAttribute.BEOLINK_SELF: {
self.device_entry.name: self._beolink_jid
}
}
}
# Add Beolink peers
peers = await self._client.get_beolink_peers()
if len(peers) > 0:
self._beolink_attributes["beolink"]["peers"] = {}
self._beolink_attributes[BangOlufsenAttribute.BEOLINK][
BangOlufsenAttribute.BEOLINK_PEERS
] = {}
for peer in peers:
self._beolink_attributes["beolink"]["peers"][peer.friendly_name] = (
peer.jid
)
self._beolink_attributes[BangOlufsenAttribute.BEOLINK][
BangOlufsenAttribute.BEOLINK_PEERS
][peer.friendly_name] = peer.jid
# Add Beolink listeners / leader
self._remote_leader = self._playback_metadata.remote_leader
@@ -477,7 +488,9 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
# Add self
group_members.append(self.entity_id)
self._beolink_attributes["beolink"]["leader"] = {
self._beolink_attributes[BangOlufsenAttribute.BEOLINK][
BangOlufsenAttribute.BEOLINK_LEADER
] = {
self._remote_leader.friendly_name: self._remote_leader.jid,
}
@@ -514,9 +527,9 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
beolink_listener.jid
)
break
self._beolink_attributes["beolink"]["listeners"] = (
beolink_listeners_attribute
)
self._beolink_attributes[BangOlufsenAttribute.BEOLINK][
BangOlufsenAttribute.BEOLINK_LISTENERS
] = beolink_listeners_attribute
self._attr_group_members = group_members
@@ -615,11 +628,18 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
return None
@property
def media_content_type(self) -> str:
def media_content_type(self) -> MediaType | str | None:
"""Return the current media type."""
# Hard to determine content type
if self._source_change.id == BangOlufsenSource.URI_STREAMER.id:
return MediaType.URL
content_type = {
BangOlufsenSource.URI_STREAMER.id: MediaType.URL,
BangOlufsenSource.DEEZER.id: BangOlufsenMediaType.DEEZER,
BangOlufsenSource.TIDAL.id: BangOlufsenMediaType.TIDAL,
BangOlufsenSource.NET_RADIO.id: BangOlufsenMediaType.RADIO,
}
# Hard to determine content type.
if self._source_change.id in content_type:
return content_type[self._source_change.id]
return MediaType.MUSIC
@property
@@ -632,6 +652,11 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
"""Return the current playback progress."""
return self._playback_progress.progress
@property
def media_content_id(self) -> str | None:
"""Return internal ID of Deezer, Tidal and radio stations."""
return self._playback_metadata.source_internal_id
@property
def media_image_url(self) -> str | None:
"""Return URL of the currently playing music."""

View File

@@ -8,6 +8,10 @@ from typing import Any
from pycoolmasternet_async import SWING_MODES
from homeassistant.components.climate import (
FAN_AUTO,
FAN_HIGH,
FAN_LOW,
FAN_MEDIUM,
ClimateEntity,
ClimateEntityFeature,
HVACMode,
@@ -31,7 +35,16 @@ CM_TO_HA_STATE = {
HA_STATE_TO_CM = {value: key for key, value in CM_TO_HA_STATE.items()}
FAN_MODES = ["low", "med", "high", "auto"]
CM_TO_HA_FAN = {
"low": FAN_LOW,
"med": FAN_MEDIUM,
"high": FAN_HIGH,
"auto": FAN_AUTO,
}
HA_FAN_TO_CM = {value: key for key, value in CM_TO_HA_FAN.items()}
FAN_MODES = list(CM_TO_HA_FAN.values())
_LOGGER = logging.getLogger(__name__)
@@ -111,7 +124,7 @@ class CoolmasterClimate(CoolmasterEntity, ClimateEntity):
@property
def fan_mode(self):
"""Return the fan setting."""
return self._unit.fan_speed
return CM_TO_HA_FAN[self._unit.fan_speed]
@property
def fan_modes(self):
@@ -138,7 +151,7 @@ class CoolmasterClimate(CoolmasterEntity, ClimateEntity):
async def async_set_fan_mode(self, fan_mode: str) -> None:
"""Set new fan mode."""
_LOGGER.debug("Setting fan mode of %s to %s", self.unique_id, fan_mode)
self._unit = await self._unit.set_fan_speed(fan_mode)
self._unit = await self._unit.set_fan_speed(HA_FAN_TO_CM[fan_mode])
self.async_write_ha_state()
async def async_set_swing_mode(self, swing_mode: str) -> None:

View File

@@ -1,22 +1,30 @@
"""API for fitbit bound to Home Assistant OAuth."""
from abc import ABC, abstractmethod
from collections.abc import Callable
from collections.abc import Awaitable, Callable
import logging
from typing import Any, cast
from fitbit import Fitbit
from fitbit.exceptions import HTTPException, HTTPUnauthorized
from fitbit_web_api import ApiClient, Configuration, DevicesApi
from fitbit_web_api.exceptions import (
ApiException,
OpenApiException,
UnauthorizedException,
)
from fitbit_web_api.models.device import Device
from requests.exceptions import ConnectionError as RequestsConnectionError
from homeassistant.const import CONF_ACCESS_TOKEN
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_entry_oauth2_flow
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.util.unit_system import METRIC_SYSTEM
from .const import FitbitUnitSystem
from .exceptions import FitbitApiException, FitbitAuthException
from .model import FitbitDevice, FitbitProfile
from .model import FitbitProfile
_LOGGER = logging.getLogger(__name__)
@@ -58,6 +66,14 @@ class FitbitApi(ABC):
expires_at=float(token[CONF_EXPIRES_AT]),
)
async def _async_get_fitbit_web_api(self) -> ApiClient:
"""Create and return an ApiClient configured with the current access token."""
token = await self.async_get_access_token()
configuration = Configuration()
configuration.pool_manager = async_get_clientsession(self._hass)
configuration.access_token = token[CONF_ACCESS_TOKEN]
return ApiClient(configuration)
async def async_get_user_profile(self) -> FitbitProfile:
"""Return the user profile from the API."""
if self._profile is None:
@@ -94,21 +110,13 @@ class FitbitApi(ABC):
return FitbitUnitSystem.METRIC
return FitbitUnitSystem.EN_US
async def async_get_devices(self) -> list[FitbitDevice]:
"""Return available devices."""
client = await self._async_get_client()
devices: list[dict[str, str]] = await self._run(client.get_devices)
async def async_get_devices(self) -> list[Device]:
"""Return available devices using fitbit-web-api."""
client = await self._async_get_fitbit_web_api()
devices_api = DevicesApi(client)
devices: list[Device] = await self._run_async(devices_api.get_devices)
_LOGGER.debug("get_devices=%s", devices)
return [
FitbitDevice(
id=device["id"],
device_version=device["deviceVersion"],
battery_level=int(device["batteryLevel"]),
battery=device["battery"],
type=device["type"],
)
for device in devices
]
return devices
async def async_get_latest_time_series(self, resource_type: str) -> dict[str, Any]:
"""Return the most recent value from the time series for the specified resource type."""
@@ -140,6 +148,20 @@ class FitbitApi(ABC):
_LOGGER.debug("Error from fitbit API: %s", err)
raise FitbitApiException("Error from fitbit API") from err
async def _run_async[_T](self, func: Callable[[], Awaitable[_T]]) -> _T:
"""Run client command."""
try:
return await func()
except UnauthorizedException as err:
_LOGGER.debug("Unauthorized error from fitbit API: %s", err)
raise FitbitAuthException("Authentication error from fitbit API") from err
except ApiException as err:
_LOGGER.debug("Error from fitbit API: %s", err)
raise FitbitApiException("Error from fitbit API") from err
except OpenApiException as err:
_LOGGER.debug("Error communicating with fitbit API: %s", err)
raise FitbitApiException("Communication error from fitbit API") from err
class OAuthFitbitApi(FitbitApi):
"""Provide fitbit authentication tied to an OAuth2 based config entry."""

View File

@@ -6,6 +6,8 @@ import datetime
import logging
from typing import Final
from fitbit_web_api.models.device import Device
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
@@ -13,7 +15,6 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, Upda
from .api import FitbitApi
from .exceptions import FitbitApiException, FitbitAuthException
from .model import FitbitDevice
_LOGGER = logging.getLogger(__name__)
@@ -23,7 +24,7 @@ TIMEOUT = 10
type FitbitConfigEntry = ConfigEntry[FitbitData]
class FitbitDeviceCoordinator(DataUpdateCoordinator[dict[str, FitbitDevice]]):
class FitbitDeviceCoordinator(DataUpdateCoordinator[dict[str, Device]]):
"""Coordinator for fetching fitbit devices from the API."""
config_entry: FitbitConfigEntry
@@ -41,7 +42,7 @@ class FitbitDeviceCoordinator(DataUpdateCoordinator[dict[str, FitbitDevice]]):
)
self._api = api
async def _async_update_data(self) -> dict[str, FitbitDevice]:
async def _async_update_data(self) -> dict[str, Device]:
"""Fetch data from API endpoint."""
async with asyncio.timeout(TIMEOUT):
try:
@@ -50,7 +51,7 @@ class FitbitDeviceCoordinator(DataUpdateCoordinator[dict[str, FitbitDevice]]):
raise ConfigEntryAuthFailed(err) from err
except FitbitApiException as err:
raise UpdateFailed(err) from err
return {device.id: device for device in devices}
return {device.id: device for device in devices if device.id is not None}
@dataclass

View File

@@ -6,6 +6,6 @@
"dependencies": ["application_credentials", "http"],
"documentation": "https://www.home-assistant.io/integrations/fitbit",
"iot_class": "cloud_polling",
"loggers": ["fitbit"],
"requirements": ["fitbit==0.3.1"]
"loggers": ["fitbit", "fitbit_web_api"],
"requirements": ["fitbit==0.3.1", "fitbit-web-api==2.13.5"]
}

View File

@@ -21,26 +21,6 @@ class FitbitProfile:
"""The locale defined in the user's Fitbit account settings."""
@dataclass
class FitbitDevice:
"""Device from the Fitbit API response."""
id: str
"""The device ID."""
device_version: str
"""The product name of the device."""
battery_level: int
"""The battery level as a percentage."""
battery: str
"""Returns the battery level of the device."""
type: str
"""The type of the device such as TRACKER or SCALE."""
@dataclass
class FitbitConfig:
"""Information from the fitbit ConfigEntry data."""

View File

@@ -8,6 +8,8 @@ import datetime
import logging
from typing import Any, Final, cast
from fitbit_web_api.models.device import Device
from homeassistant.components.sensor import (
SensorDeviceClass,
SensorEntity,
@@ -32,7 +34,7 @@ from .api import FitbitApi
from .const import ATTRIBUTION, BATTERY_LEVELS, DOMAIN, FitbitScope, FitbitUnitSystem
from .coordinator import FitbitConfigEntry, FitbitDeviceCoordinator
from .exceptions import FitbitApiException, FitbitAuthException
from .model import FitbitDevice, config_from_entry_data
from .model import config_from_entry_data
_LOGGER: Final = logging.getLogger(__name__)
@@ -657,7 +659,7 @@ class FitbitBatterySensor(CoordinatorEntity[FitbitDeviceCoordinator], SensorEnti
coordinator: FitbitDeviceCoordinator,
user_profile_id: str,
description: FitbitSensorEntityDescription,
device: FitbitDevice,
device: Device,
enable_default_override: bool,
) -> None:
"""Initialize the Fitbit sensor."""
@@ -677,7 +679,9 @@ class FitbitBatterySensor(CoordinatorEntity[FitbitDeviceCoordinator], SensorEnti
@property
def icon(self) -> str | None:
"""Icon to use in the frontend, if any."""
if battery_level := BATTERY_LEVELS.get(self.device.battery):
if self.device.battery is not None and (
battery_level := BATTERY_LEVELS.get(self.device.battery)
):
return icon_for_battery_level(battery_level=battery_level)
return self.entity_description.icon
@@ -697,7 +701,7 @@ class FitbitBatterySensor(CoordinatorEntity[FitbitDeviceCoordinator], SensorEnti
@callback
def _handle_coordinator_update(self) -> None:
"""Handle updated data from the coordinator."""
self.device = self.coordinator.data[self.device.id]
self.device = self.coordinator.data[cast(str, self.device.id)]
self._attr_native_value = self.device.battery
self.async_write_ha_state()
@@ -715,7 +719,7 @@ class FitbitBatteryLevelSensor(
coordinator: FitbitDeviceCoordinator,
user_profile_id: str,
description: FitbitSensorEntityDescription,
device: FitbitDevice,
device: Device,
) -> None:
"""Initialize the Fitbit sensor."""
super().__init__(coordinator)
@@ -736,6 +740,6 @@ class FitbitBatteryLevelSensor(
@callback
def _handle_coordinator_update(self) -> None:
"""Handle updated data from the coordinator."""
self.device = self.coordinator.data[self.device.id]
self.device = self.coordinator.data[cast(str, self.device.id)]
self._attr_native_value = self.device.battery_level
self.async_write_ha_state()

View File

@@ -132,7 +132,6 @@
"heavily_polluted": "Heavily polluted",
"heavy_air_pollution": "Heavy air pollution",
"high_air_pollution": "High air pollution",
"high_air_quality": "High air pollution",
"high_health_risk": "High health risk",
"horrible_air_quality": "Horrible air quality",
"light_air_pollution": "Light air pollution",
@@ -165,20 +164,18 @@
"slightly_polluted": "Slightly polluted",
"sufficient_air_quality": "Sufficient air quality",
"unfavorable_air_quality": "Unfavorable air quality",
"unfavorable_sensitive": "Unfavorable air quality for sensitive groups",
"unfavorable_air_quality_for_sensitive_groups": "Unfavorable air quality for sensitive groups",
"unhealthy_air_quality": "Unhealthy air quality",
"unhealthy_sensitive": "Unhealthy air quality for sensitive groups",
"unsatisfactory_air_quality": "Unsatisfactory air quality",
"very_bad_air_quality": "Very bad air quality",
"very_good_air_quality": "Very good air quality",
"very_high_air_pollution": "Very high air pollution",
"very_high_air_quality": "Very High air pollution",
"very_high_health_risk": "Very high health risk",
"very_low_air_pollution": "Very low air pollution",
"very_polluted": "Very polluted",
"very_poor_air_quality": "Very poor air quality",
"very_unfavorable_air_quality": "Very unfavorable air quality",
"very_unhealthy": "Very unhealthy air quality",
"very_unhealthy_air_quality": "Very unhealthy air quality",
"warning_air_pollution": "Warning level air pollution"
}

View File

@@ -53,7 +53,7 @@ from homeassistant.helpers.issue_registry import (
async_create_issue,
async_delete_issue,
)
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType, StateType
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from .const import CONF_IGNORE_NON_NUMERIC, DOMAIN
from .entity import GroupEntity
@@ -374,7 +374,7 @@ class SensorGroup(GroupEntity, SensorEntity):
def async_update_group_state(self) -> None:
"""Query all members and determine the sensor group state."""
self.calculate_state_attributes(self._get_valid_entities())
states: list[StateType] = []
states: list[str] = []
valid_units = self._valid_units
valid_states: list[bool] = []
sensor_values: list[tuple[str, float, State]] = []

View File

@@ -211,7 +211,7 @@ async def ws_start_preview(
@callback
def async_preview_updated(
last_exception: Exception | None, state: str, attributes: Mapping[str, Any]
last_exception: BaseException | None, state: str, attributes: Mapping[str, Any]
) -> None:
"""Forward config entry state events to websocket."""
if last_exception:

View File

@@ -241,7 +241,9 @@ class HistoryStatsSensor(HistoryStatsSensorBase):
async def async_start_preview(
self,
preview_callback: Callable[[Exception | None, str, Mapping[str, Any]], None],
preview_callback: Callable[
[BaseException | None, str, Mapping[str, Any]], None
],
) -> CALLBACK_TYPE:
"""Render a preview."""

View File

@@ -39,6 +39,10 @@ if TYPE_CHECKING:
_LOGGER = logging.getLogger(__name__)
_DESCRIPTION_PLACEHOLDERS = {
"sensor_value_types_url": "https://www.home-assistant.io/integrations/knx/#value-types"
}
@callback
def async_setup_services(hass: HomeAssistant) -> None:
@@ -48,6 +52,7 @@ def async_setup_services(hass: HomeAssistant) -> None:
SERVICE_KNX_SEND,
service_send_to_knx_bus,
schema=SERVICE_KNX_SEND_SCHEMA,
description_placeholders=_DESCRIPTION_PLACEHOLDERS,
)
hass.services.async_register(
@@ -63,6 +68,7 @@ def async_setup_services(hass: HomeAssistant) -> None:
SERVICE_KNX_EVENT_REGISTER,
service_event_register_modify,
schema=SERVICE_KNX_EVENT_REGISTER_SCHEMA,
description_placeholders=_DESCRIPTION_PLACEHOLDERS,
)
async_register_admin_service(
@@ -71,6 +77,7 @@ def async_setup_services(hass: HomeAssistant) -> None:
SERVICE_KNX_EXPOSURE_REGISTER,
service_exposure_register_modify,
schema=SERVICE_KNX_EXPOSURE_REGISTER_SCHEMA,
description_placeholders=_DESCRIPTION_PLACEHOLDERS,
)
async_register_admin_service(

View File

@@ -674,7 +674,7 @@
"name": "Remove event registration"
},
"type": {
"description": "If set, the payload will be decoded as given DPT in the event data `value` key. KNX sensor types are valid values (see https://www.home-assistant.io/integrations/knx/#value-types).",
"description": "If set, the payload will be decoded as given DPT in the event data `value` key. KNX sensor types are valid values (see {sensor_value_types_url}).",
"name": "Value type"
}
},
@@ -704,7 +704,7 @@
"name": "Remove exposure"
},
"type": {
"description": "Telegrams will be encoded as given DPT. 'binary' and all KNX sensor types are valid values (see https://www.home-assistant.io/integrations/knx/#value-types).",
"description": "Telegrams will be encoded as given DPT. 'binary' and all KNX sensor types are valid values (see {sensor_value_types_url}).",
"name": "Value type"
}
},
@@ -740,7 +740,7 @@
"name": "Send as Response"
},
"type": {
"description": "If set, the payload will not be sent as raw bytes, but encoded as given DPT. KNX sensor types are valid values (see https://www.home-assistant.io/integrations/knx/#value-types).",
"description": "If set, the payload will not be sent as raw bytes, but encoded as given DPT. KNX sensor types are valid values (see {sensor_value_types_url}).",
"name": "Value type"
}
},

View File

@@ -432,7 +432,7 @@ class NumberDeviceClass(StrEnum):
Unit of measurement: UnitOfVolumeFlowRate
- SI / metric: `m³/h`, `m³/min`, `m³/s`, `L/h`, `L/min`, `L/s`, `mL/s`
- USCS / imperial: `ft³/min`, `gal/min`
- USCS / imperial: `ft³/min`, `gal/min`, `gal/d`
"""
WATER = "water"

View File

@@ -11,5 +11,5 @@
"documentation": "https://www.home-assistant.io/integrations/pooldose",
"iot_class": "local_polling",
"quality_scale": "bronze",
"requirements": ["python-pooldose==0.7.8"]
"requirements": ["python-pooldose==0.8.0"]
}

View File

@@ -468,7 +468,7 @@ class SensorDeviceClass(StrEnum):
Unit of measurement: UnitOfVolumeFlowRate
- SI / metric: `m³/h`, `m³/min`, `m³/s`, `L/h`, `L/min`, `L/s`, `mL/s`
- USCS / imperial: `ft³/min`, `gal/min`
- USCS / imperial: `ft³/min`, `gal/min`, `gal/d`
"""
WATER = "water"

View File

@@ -394,7 +394,7 @@ def _suggest_report_issue(hass: HomeAssistant, entity_id: str) -> str:
def warn_dip(
hass: HomeAssistant, entity_id: str, state: State, previous_fstate: float
) -> None:
"""Log a warning once if a sensor with state_class_total has a decreasing value.
"""Log a warning once if a sensor with state class TOTAL_INCREASING has a decreasing value.
The log will be suppressed until two dips have been seen to prevent warning due to
rounding issues with databases storing the state as a single precision float, which
@@ -415,12 +415,13 @@ def warn_dip(
return
_LOGGER.warning(
(
"Entity %s %shas state class total_increasing, but its state is not"
" strictly increasing. Triggered by state %s (%s) with last_updated set"
" to %s. Please %s"
"Entity %s %shas state class %s, but its state is not"
" strictly increasing. Triggered by state %s (previous %s) with"
" last_updated set to %s. Please %s"
),
entity_id,
f"from integration {domain} " if domain else "",
SensorStateClass.TOTAL_INCREASING,
state.state,
previous_fstate,
state.last_updated.isoformat(),
@@ -429,7 +430,7 @@ def warn_dip(
def warn_negative(hass: HomeAssistant, entity_id: str, state: State) -> None:
"""Log a warning once if a sensor with state_class_total has a negative value."""
"""Log a warning once if a sensor with state class TOTAL_INCREASING has a negative value."""
if WARN_NEGATIVE not in hass.data:
hass.data[WARN_NEGATIVE] = set()
if entity_id not in hass.data[WARN_NEGATIVE]:
@@ -438,11 +439,12 @@ def warn_negative(hass: HomeAssistant, entity_id: str, state: State) -> None:
domain = entity_info["domain"] if entity_info else None
_LOGGER.warning(
(
"Entity %s %shas state class total_increasing, but its state is "
"Entity %s %shas state class %s, but its state is "
"negative. Triggered by state %s with last_updated set to %s. Please %s"
),
entity_id,
f"from integration {domain} " if domain else "",
SensorStateClass.TOTAL_INCREASING,
state.state,
state.last_updated.isoformat(),
_suggest_report_issue(hass, entity_id),
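The warn_dip docstring above mentions databases storing the last state as a single-precision float; the apparent "dip" that such rounding can produce is easy to reproduce in isolation. A minimal sketch with illustrative values only, not taken from the recorder code:

import struct

old = 1000000.06                                        # value previously recorded
stored = struct.unpack("f", struct.pack("f", old))[0]   # single-precision round trip
new = 1000000.06                                        # sensor reports the same value again

print(stored)        # 1000000.0625 -> the round-tripped value came back slightly larger
print(new < stored)  # True -> looks like a tiny decrease, hence the two-dip suppression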

View File

@@ -20,6 +20,9 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import SFRConfigEntry
from .entity import SFRCoordinatorEntity
# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0
@dataclass(frozen=True, kw_only=True)
class SFRBoxBinarySensorEntityDescription[_T](BinarySensorEntityDescription):
@@ -94,6 +97,4 @@ class SFRBoxBinarySensor[_T](SFRCoordinatorEntity[_T], BinarySensorEntity):
@property
def is_on(self) -> bool | None:
"""Return the native value of the device."""
if self.coordinator.data is None:
return None
return self.entity_description.value_fn(self.coordinator.data)

View File

@@ -24,6 +24,10 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import SFRConfigEntry
from .entity import SFREntity
# Coordinator is used to centralize the data updates
# but better to queue action calls to avoid conflicts
PARALLEL_UPDATES = 1
def with_error_wrapping[**_P, _R](
func: Callable[Concatenate[SFRBoxButton, _P], Awaitable[_R]],

View File

@@ -39,7 +39,10 @@ class SFRBoxFlowHandler(ConfigFlow, domain=DOMAIN):
VERSION = 1
_box: SFRBox
_config: dict[str, Any] = {}
def __init__(self) -> None:
"""Initialize SFR Box flow."""
self._config: dict[str, Any] = {}
async def async_step_user(
self, user_input: dict[str, str] | None = None
@@ -47,6 +50,7 @@ class SFRBoxFlowHandler(ConfigFlow, domain=DOMAIN):
"""Handle a flow initialized by the user."""
errors = {}
if user_input is not None:
self._async_abort_entries_match({CONF_HOST: user_input[CONF_HOST]})
box = SFRBox(
ip=user_input[CONF_HOST], client=async_get_clientsession(self.hass)
)
@@ -60,7 +64,6 @@ class SFRBoxFlowHandler(ConfigFlow, domain=DOMAIN):
assert system_info is not None
await self.async_set_unique_id(system_info.mac_addr)
self._abort_if_unique_id_configured()
self._async_abort_entries_match({CONF_HOST: user_input[CONF_HOST]})
self._box = box
self._config.update(user_input)
return await self.async_step_choose_auth()

View File

@@ -33,7 +33,7 @@ class SFRRuntimeData:
wan: SFRDataUpdateCoordinator[WanInfo]
class SFRDataUpdateCoordinator[_DataT](DataUpdateCoordinator[_DataT | None]):
class SFRDataUpdateCoordinator[_DataT](DataUpdateCoordinator[_DataT]):
"""Coordinator to manage data updates."""
config_entry: SFRConfigEntry
@@ -57,9 +57,11 @@ class SFRDataUpdateCoordinator[_DataT](DataUpdateCoordinator[_DataT | None]):
update_interval=_SCAN_INTERVAL,
)
async def _async_update_data(self) -> _DataT | None:
async def _async_update_data(self) -> _DataT:
"""Update data."""
try:
return await self._method(self.box)
if data := await self._method(self.box):
return data
except SFRBoxError as err:
raise UpdateFailed from err
raise UpdateFailed("No data received from SFR Box")

View File

@@ -0,0 +1,113 @@
rules:
## Bronze
config-flow: done
test-before-configure: done
unique-config-entry: done
config-flow-test-coverage:
status: todo
comment: |
- test_config_flow_skip_auth -> I'd split the happy from the not happy flows
- We should test created mac address
runtime-data: done
test-before-setup: done
appropriate-polling: done
entity-unique-id: done
has-entity-name: done
entity-event-setup:
status: exempt
comment: local_polling without events
dependency-transparency: done
action-setup:
status: exempt
comment: There are no service actions
common-modules: done
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
docs-actions:
status: exempt
comment: There are no service actions
brands: done
## Silver
config-entry-unloading: done
log-when-unavailable: done
entity-unavailable: done
action-exceptions: done
reauthentication-flow: done
parallel-updates: done
test-coverage:
status: todo
comment: |
- 93% on diagnostics / 92% on sensors, need to improve overall coverage
- you can use load_json_object_fixture
- It would be nice to use the snapshot helper as currently it would just throw everything in a list
- We also test the devices in each platform, kinda overkill
- assert not hass.data.get(DOMAIN) not needed
- We should use entity_registry_enabled_by_default instead to enable entities
integration-owner: done
docs-installation-parameters:
status: todo
comment: not yet documented
docs-configuration-parameters:
status: exempt
comment: No options flow
## Gold
entity-translations: done
entity-device-class:
status: todo
comment: |
What does DSL counter count?
What is the state of CRC?
line_status and training and net_infra and mode -> unknown shouldn't be an option and the entity should return None instead
devices:
status: todo
comment: MAC address can be set to the connections
entity-category: done
entity-disabled-by-default: done
discovery:
status: todo
comment: Should be possible
stale-devices: done
diagnostics: done
exception-translations:
status: todo
comment: not yet documented
icon-translations: done
reconfiguration-flow:
status: todo
comment: Need to be able to manually change the IP address
dynamic-devices: done
discovery-update-info:
status: todo
comment: Discovery is not yet implemented
repair-issues: done
docs-use-cases:
status: todo
comment: not yet documented
docs-supported-devices: done
docs-supported-functions: done
docs-data-update:
status: todo
comment: not yet documented
docs-known-limitations:
status: todo
comment: not yet documented
docs-troubleshooting:
status: todo
comment: not yet documented
docs-examples:
status: todo
comment: not yet documented
## Platinum
async-dependency:
status: done
comment: sfrbox-api is asynchronous
inject-websession:
status: done
comment: sfrbox-api uses injected aiohttp websession
strict-typing:
status: done
comment: sfrbox-api is fully typed, and integration uses strict typing

View File

@@ -26,6 +26,9 @@ from homeassistant.helpers.typing import StateType
from .coordinator import SFRConfigEntry
from .entity import SFRCoordinatorEntity
# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0
@dataclass(frozen=True, kw_only=True)
class SFRBoxSensorEntityDescription[_T](SensorEntityDescription):
@@ -250,6 +253,4 @@ class SFRBoxSensor[_T](SFRCoordinatorEntity[_T], SensorEntity):
@property
def native_value(self) -> StateType:
"""Return the native value of the device."""
if self.coordinator.data is None:
return None
return self.entity_description.value_fn(self.coordinator.data)

View File

@@ -79,6 +79,7 @@ from .utils import (
get_rpc_device_wakeup_period,
get_rpc_ws_url,
get_shelly_model_name,
is_rpc_ble_scanner_supported,
update_device_fw_info,
)
@@ -726,6 +727,7 @@ class ShellyRpcCoordinator(ShellyCoordinatorBase[RpcDevice]):
"""Handle device connected."""
async with self._connection_lock:
if self.connected: # Already connected
LOGGER.debug("Device %s already connected", self.name)
return
self.connected = True
try:
@@ -743,10 +745,7 @@ class ShellyRpcCoordinator(ShellyCoordinatorBase[RpcDevice]):
is updated.
"""
if not self.sleep_period:
if (
self.config_entry.runtime_data.rpc_supports_scripts
and not self.config_entry.runtime_data.rpc_zigbee_firmware
):
if is_rpc_ble_scanner_supported(self.config_entry):
await self._async_connect_ble_scanner()
else:
await self._async_setup_outbound_websocket()
@@ -776,6 +775,10 @@ class ShellyRpcCoordinator(ShellyCoordinatorBase[RpcDevice]):
if await async_ensure_ble_enabled(self.device):
# BLE enable required a reboot, don't bother connecting
# the scanner since it will be disconnected anyway
LOGGER.debug(
"Device %s BLE enable required a reboot, skipping scanner connect",
self.name,
)
return
assert self.device_id is not None
self._disconnected_callbacks.append(
@@ -844,21 +847,14 @@ class ShellyRpcCoordinator(ShellyCoordinatorBase[RpcDevice]):
"""Shutdown the coordinator."""
if self.device.connected:
try:
if not self.sleep_period:
if not self.sleep_period and is_rpc_ble_scanner_supported(
self.config_entry
):
await async_stop_scanner(self.device)
await super().shutdown()
except InvalidAuthError:
self.config_entry.async_start_reauth(self.hass)
return
except RpcCallError as err:
# Ignore 404 (No handler for) error
if err.code != 404:
LOGGER.debug(
"Error during shutdown for device %s: %s",
self.name,
err.message,
)
return
except DeviceConnectionError as err:
# If the device is restarting or has gone offline before
# the ping/pong timeout happens, the shutdown command

View File

@@ -994,3 +994,11 @@ def async_migrate_rpc_virtual_components_unique_ids(
}
return None
def is_rpc_ble_scanner_supported(entry: ConfigEntry) -> bool:
"""Return true if BLE scanner is supported."""
return (
entry.runtime_data.rpc_supports_scripts
and not entry.runtime_data.rpc_zigbee_firmware
)

View File

@@ -48,6 +48,7 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from homeassistant.util import dt as dt_util
from . import TriggerUpdateCoordinator
from .entity import AbstractTemplateEntity
from .helpers import (
async_setup_template_entry,
async_setup_template_platform,
@@ -168,11 +169,27 @@ def async_create_preview_binary_sensor(
)
class StateBinarySensorEntity(TemplateEntity, BinarySensorEntity, RestoreEntity):
class AbstractTemplateBinarySensor(
AbstractTemplateEntity, BinarySensorEntity, RestoreEntity
):
"""Representation of a template binary sensor features."""
_entity_id_format = ENTITY_ID_FORMAT
# The super init is not called because TemplateEntity and TriggerEntity will call AbstractTemplateEntity.__init__.
# This ensures that the __init__ on AbstractTemplateEntity is not called twice.
def __init__(self, config: dict[str, Any]) -> None: # pylint: disable=super-init-not-called
"""Initialize the features."""
self._attr_device_class = config.get(CONF_DEVICE_CLASS)
self._template: template.Template = config[CONF_STATE]
self._delay_cancel: CALLBACK_TYPE | None = None
class StateBinarySensorEntity(TemplateEntity, AbstractTemplateBinarySensor):
"""A virtual binary sensor that triggers from another sensor."""
_attr_should_poll = False
_entity_id_format = ENTITY_ID_FORMAT
def __init__(
self,
@@ -182,19 +199,19 @@ class StateBinarySensorEntity(TemplateEntity, BinarySensorEntity, RestoreEntity)
) -> None:
"""Initialize the Template binary sensor."""
TemplateEntity.__init__(self, hass, config, unique_id)
self._attr_device_class = config.get(CONF_DEVICE_CLASS)
self._template: template.Template = config[CONF_STATE]
self._delay_cancel = None
AbstractTemplateBinarySensor.__init__(self, config)
self._delay_on = None
self._delay_on_raw = config.get(CONF_DELAY_ON)
self._delay_on_template = config.get(CONF_DELAY_ON)
self._delay_off = None
self._delay_off_raw = config.get(CONF_DELAY_OFF)
self._delay_off_template = config.get(CONF_DELAY_OFF)
async def async_added_to_hass(self) -> None:
"""Restore state."""
if (
(self._delay_on_raw is not None or self._delay_off_raw is not None)
(
self._delay_on_template is not None
or self._delay_off_template is not None
)
and (last_state := await self.async_get_last_state()) is not None
and last_state.state not in (STATE_UNKNOWN, STATE_UNAVAILABLE)
):
@@ -206,20 +223,20 @@ class StateBinarySensorEntity(TemplateEntity, BinarySensorEntity, RestoreEntity)
"""Set up templates."""
self.add_template_attribute("_state", self._template, None, self._update_state)
if self._delay_on_raw is not None:
if self._delay_on_template is not None:
try:
self._delay_on = cv.positive_time_period(self._delay_on_raw)
self._delay_on = cv.positive_time_period(self._delay_on_template)
except vol.Invalid:
self.add_template_attribute(
"_delay_on", self._delay_on_raw, cv.positive_time_period
"_delay_on", self._delay_on_template, cv.positive_time_period
)
if self._delay_off_raw is not None:
if self._delay_off_template is not None:
try:
self._delay_off = cv.positive_time_period(self._delay_off_raw)
self._delay_off = cv.positive_time_period(self._delay_off_template)
except vol.Invalid:
self.add_template_attribute(
"_delay_off", self._delay_off_raw, cv.positive_time_period
"_delay_off", self._delay_off_template, cv.positive_time_period
)
super()._async_setup_templates()
@@ -259,12 +276,10 @@ class StateBinarySensorEntity(TemplateEntity, BinarySensorEntity, RestoreEntity)
self._delay_cancel = async_call_later(self.hass, delay, _set_state)
class TriggerBinarySensorEntity(TriggerEntity, BinarySensorEntity, RestoreEntity):
class TriggerBinarySensorEntity(TriggerEntity, AbstractTemplateBinarySensor):
"""Sensor entity based on trigger data."""
_entity_id_format = ENTITY_ID_FORMAT
domain = BINARY_SENSOR_DOMAIN
extra_template_keys = (CONF_STATE,)
def __init__(
self,
@@ -273,7 +288,8 @@ class TriggerBinarySensorEntity(TriggerEntity, BinarySensorEntity, RestoreEntity
config: dict,
) -> None:
"""Initialize the entity."""
super().__init__(hass, coordinator, config)
TriggerEntity.__init__(self, hass, coordinator, config)
AbstractTemplateBinarySensor.__init__(self, config)
for key in (CONF_STATE, CONF_DELAY_ON, CONF_DELAY_OFF, CONF_AUTO_OFF):
if isinstance(config.get(key), template.Template):
@@ -282,7 +298,6 @@ class TriggerBinarySensorEntity(TriggerEntity, BinarySensorEntity, RestoreEntity
self._last_delay_from: bool | None = None
self._last_delay_to: bool | None = None
self._delay_cancel: CALLBACK_TYPE | None = None
self._auto_off_cancel: CALLBACK_TYPE | None = None
self._auto_off_time: datetime | None = None

View File

@@ -26,7 +26,12 @@ from homeassistant.const import (
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers import config_validation as cv, entity_registry as er
from homeassistant.helpers import (
config_validation as cv,
device_registry as dr,
entity_registry as er,
)
from homeassistant.helpers.device_registry import DeviceEntryType
from homeassistant.helpers.typing import ConfigType
from .const import DEFAULT_PATH, DEFAULT_SSL, DOMAIN
@@ -93,6 +98,19 @@ async def async_setup_entry(
except (AuthenticationError, UnknownError) as error:
raise ConfigEntryAuthFailed from error
protocol: Final = "https" if config_entry.data[CONF_SSL] else "http"
device_registry = dr.async_get(hass)
device_registry.async_get_or_create(
config_entry_id=config_entry.entry_id,
identifiers={(DOMAIN, config_entry.entry_id)},
manufacturer="Transmission",
entry_type=DeviceEntryType.SERVICE,
sw_version=api.server_version,
configuration_url=(
f"{protocol}://{config_entry.data[CONF_HOST]}:{config_entry.data[CONF_PORT]}"
),
)
coordinator = TransmissionDataUpdateCoordinator(hass, config_entry, api)
await hass.async_add_executor_job(coordinator.init_torrent_list)

View File

@@ -26,5 +26,4 @@ class TransmissionEntity(CoordinatorEntity[TransmissionDataUpdateCoordinator]):
)
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, coordinator.config_entry.entry_id)},
manufacturer="Transmission",
)

View File

@@ -1,4 +1,43 @@
{
"entity": {
"sensor": {
"active_torrents": {
"default": "mdi:counter"
},
"completed_torrents": {
"default": "mdi:counter"
},
"download_speed": {
"default": "mdi:cloud-download"
},
"paused_torrents": {
"default": "mdi:counter"
},
"started_torrents": {
"default": "mdi:counter"
},
"total_torrents": {
"default": "mdi:counter"
},
"transmission_status": {
"default": "mdi:information-outline"
},
"upload_speed": {
"default": "mdi:cloud-upload"
}
},
"switch": {
"on_off": {
"default": "mdi:cloud",
"state": {
"off": "mdi:cloud-off"
}
},
"turtle_mode": {
"default": "mdi:tortoise"
}
}
},
"services": {
"add_torrent": {
"service": "mdi:download"

View File

@@ -30,18 +30,12 @@ rules:
entity-unavailable: done
integration-owner: done
log-when-unavailable: done
parallel-updates: todo
parallel-updates: done
reauthentication-flow: done
test-coverage:
status: todo
comment: |
Change to mock_setup_entry to avoid repetition when expanding tests.
test-coverage: done
# Gold
devices:
status: todo
comment: |
Add additional device detail including link to ui.
devices: done
diagnostics: todo
discovery-update-info: todo
discovery: todo
@@ -61,10 +55,7 @@ rules:
Speed sensors change so frequently that disabling by default may be appropriate.
entity-translations: done
exception-translations: done
icon-translations:
status: todo
comment: |
Add icons for sensors & switches.
icon-translations: done
reconfiguration-flow: todo
repair-issues: todo
stale-devices: todo

View File

@@ -29,6 +29,8 @@ from .const import (
from .coordinator import TransmissionConfigEntry, TransmissionDataUpdateCoordinator
from .entity import TransmissionEntity
PARALLEL_UPDATES = 0
MODES: dict[str, list[str] | None] = {
"started_torrents": ["downloading"],
"completed_torrents": ["seeding"],

View File

@@ -11,6 +11,8 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import TransmissionConfigEntry, TransmissionDataUpdateCoordinator
from .entity import TransmissionEntity
PARALLEL_UPDATES = 0
@dataclass(frozen=True, kw_only=True)
class TransmissionSwitchEntityDescription(SwitchEntityDescription):

View File

@@ -754,6 +754,7 @@ class ConfigEntry[_DataT = Any]:
error_reason_translation_key = None
error_reason_translation_placeholders = None
result = False
try:
with async_start_setup(
hass, integration=self.domain, group=self.entry_id, phase=setup_phase
@@ -775,8 +776,6 @@ class ConfigEntry[_DataT = Any]:
self.domain,
error_reason,
)
await self._async_process_on_unload(hass)
result = False
except ConfigEntryAuthFailed as exc:
message = str(exc)
auth_base_message = "could not authenticate"
@@ -792,9 +791,7 @@ class ConfigEntry[_DataT = Any]:
self.domain,
auth_message,
)
await self._async_process_on_unload(hass)
self.async_start_reauth(hass)
result = False
except ConfigEntryNotReady as exc:
message = str(exc)
error_reason_translation_key = exc.translation_key
@@ -835,14 +832,39 @@ class ConfigEntry[_DataT = Any]:
functools.partial(self._async_setup_again, hass),
)
await self._async_process_on_unload(hass)
return
# pylint: disable-next=broad-except
except (asyncio.CancelledError, SystemExit, Exception):
except asyncio.CancelledError:
# We want to propagate CancelledError if we are being cancelled.
if (task := asyncio.current_task()) and task.cancelling() > 0:
_LOGGER.exception(
"Setup of config entry '%s' for %s integration cancelled",
self.title,
self.domain,
)
self._async_set_state(
hass,
ConfigEntryState.SETUP_ERROR,
None,
None,
None,
)
raise
# This was not a "real" cancellation, log it and treat as a normal error.
_LOGGER.exception(
"Error setting up entry %s for %s", self.title, integration.domain
)
result = False
# pylint: disable-next=broad-except
except (SystemExit, Exception):
_LOGGER.exception(
"Error setting up entry %s for %s", self.title, integration.domain
)
finally:
if not result and domain_is_integration:
await self._async_process_on_unload(hass)
#
# After successfully calling async_setup_entry, it is important that this function

View File

@@ -15,9 +15,9 @@ if TYPE_CHECKING:
from .helpers.typing import NoEventData
APPLICATION_NAME: Final = "HomeAssistant"
MAJOR_VERSION: Final = 2025
MINOR_VERSION: Final = 12
PATCH_VERSION: Final = "0b2"
MAJOR_VERSION: Final = 2026
MINOR_VERSION: Final = 1
PATCH_VERSION: Final = "0.dev0"
__short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
__version__: Final = f"{__short_version__}.{PATCH_VERSION}"
REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 13, 2)
@@ -653,6 +653,7 @@ class UnitOfVolumeFlowRate(StrEnum):
LITERS_PER_SECOND = "L/s"
GALLONS_PER_HOUR = "gal/h"
GALLONS_PER_MINUTE = "gal/min"
GALLONS_PER_DAY = "gal/d"
MILLILITERS_PER_SECOND = "mL/s"

View File

@@ -131,7 +131,7 @@ class DataUpdateCoordinator(BaseDataUpdateCoordinatorProtocol, Generic[_DataT]):
self._request_refresh_task: asyncio.TimerHandle | None = None
self._retry_after: float | None = None
self.last_update_success = True
self.last_exception: Exception | None = None
self.last_exception: BaseException | None = None
if request_refresh_debouncer is None:
request_refresh_debouncer = Debouncer(
@@ -492,8 +492,16 @@ class DataUpdateCoordinator(BaseDataUpdateCoordinatorProtocol, Generic[_DataT]):
self.config_entry.async_start_reauth(self.hass)
except NotImplementedError as err:
self.last_exception = err
self.last_update_success = False
raise
except asyncio.CancelledError as err:
self.last_exception = err
self.last_update_success = False
if (task := asyncio.current_task()) and task.cancelling() > 0:
raise
except Exception as err:
self.last_exception = err
self.last_update_success = False
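Both the config entry setup hunk and the coordinator hunk above re-raise CancelledError only when asyncio.current_task().cancelling() is non-zero, i.e. when the surrounding task was actually asked to cancel. A minimal standalone sketch of that check, assuming Python 3.11+ (which added Task.cancelling()); the names are illustrative, not from Home Assistant:

import asyncio

async def flaky_update() -> None:
    # Simulate a CancelledError leaking out of an inner future or library call,
    # which is not the same thing as the surrounding task being cancelled.
    raise asyncio.CancelledError

async def refresh() -> None:
    try:
        await flaky_update()
    except asyncio.CancelledError:
        task = asyncio.current_task()
        if task and task.cancelling() > 0:
            # The task itself has a pending cancellation request: propagate it.
            raise
        # Otherwise treat it like any other update failure.
        print("inner cancellation absorbed, treated as a normal error")

asyncio.run(refresh())  # prints the message instead of dying with CancelledError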

View File

@@ -69,7 +69,8 @@ _HECTARE_TO_M2 = 100 * 100 # 1 hectare = 10,000 m²
_MIN_TO_SEC = 60 # 1 min = 60 seconds
_HRS_TO_MINUTES = 60 # 1 hr = 60 minutes
_HRS_TO_SECS = _HRS_TO_MINUTES * _MIN_TO_SEC # 1 hr = 60 minutes = 3600 seconds
_DAYS_TO_SECS = 24 * _HRS_TO_SECS # 1 day = 24 hours = 86400 seconds
_DAYS_TO_HRS = 24 # 1 day = 24 hours
_DAYS_TO_SECS = _DAYS_TO_HRS * _HRS_TO_SECS # 1 day = 24 hours = 86400 seconds
# Energy conversion constants
_WH_TO_J = 3600 # 1 Wh = 3600 J
@@ -852,6 +853,7 @@ class VolumeFlowRateConverter(BaseUnitConverter):
UnitOfVolumeFlowRate.GALLONS_PER_HOUR: 1 / _GALLON_TO_CUBIC_METER,
UnitOfVolumeFlowRate.GALLONS_PER_MINUTE: 1
/ (_HRS_TO_MINUTES * _GALLON_TO_CUBIC_METER),
UnitOfVolumeFlowRate.GALLONS_PER_DAY: _DAYS_TO_HRS / _GALLON_TO_CUBIC_METER,
UnitOfVolumeFlowRate.MILLILITERS_PER_SECOND: 1
/ (_HRS_TO_SECS * _ML_TO_CUBIC_METER),
}
@@ -865,6 +867,7 @@ class VolumeFlowRateConverter(BaseUnitConverter):
UnitOfVolumeFlowRate.LITERS_PER_SECOND,
UnitOfVolumeFlowRate.GALLONS_PER_HOUR,
UnitOfVolumeFlowRate.GALLONS_PER_MINUTE,
UnitOfVolumeFlowRate.GALLONS_PER_DAY,
UnitOfVolumeFlowRate.MILLILITERS_PER_SECOND,
}
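A quick sanity check of the new GALLONS_PER_DAY factor, assuming (as the surrounding table suggests) that each factor expresses how many of that unit make up one m³/h; the constants below are reconstructed for illustration, not imported from homeassistant.util.unit_conversion:

GALLON_TO_CUBIC_METER = 231 * (0.0254**3)  # US gallon is exactly 231 cubic inches
DAYS_TO_HRS = 24

gal_per_day = DAYS_TO_HRS / GALLON_TO_CUBIC_METER   # gal/d per m³/h
gal_per_min = 1 / (60 * GALLON_TO_CUBIC_METER)      # gal/min per m³/h

print(round(gal_per_day, 2))             # ~6340.13 gal/d in one m³/h
print(round(gal_per_day / gal_per_min))  # 1440, i.e. minutes per day, as expected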

View File

@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
[project]
name = "homeassistant"
version = "2025.12.0b2"
version = "2026.1.0.dev0"
license = "Apache-2.0"
license-files = ["LICENSE*", "homeassistant/backports/LICENSE*"]
description = "Open-source home automation platform running on Python 3."
@@ -830,7 +830,7 @@ ignore = [
# Disabled because ruff does not understand type of __all__ generated by a function
"PLE0605",
"FURB116"
"FURB116",
]
[tool.ruff.lint.flake8-import-conventions.extend-aliases]

requirements_all.txt generated
View File

@@ -958,6 +958,9 @@ fing_agent_api==1.0.3
# homeassistant.components.fints
fints==3.1.0
# homeassistant.components.fitbit
fitbit-web-api==2.13.5
# homeassistant.components.fitbit
fitbit==0.3.1
@@ -2548,7 +2551,7 @@ python-overseerr==0.7.1
python-picnic-api2==1.3.1
# homeassistant.components.pooldose
python-pooldose==0.7.8
python-pooldose==0.8.0
# homeassistant.components.rabbitair
python-rabbitair==0.0.8

View File

@@ -846,6 +846,9 @@ fing_agent_api==1.0.3
# homeassistant.components.fints
fints==3.1.0
# homeassistant.components.fitbit
fitbit-web-api==2.13.5
# homeassistant.components.fitbit
fitbit==0.3.1
@@ -2129,7 +2132,7 @@ python-overseerr==0.7.1
python-picnic-api2==1.3.1
# homeassistant.components.pooldose
python-pooldose==0.7.8
python-pooldose==0.8.0
# homeassistant.components.rabbitair
python-rabbitair==0.0.8

View File

@@ -9,9 +9,8 @@ cd "$(realpath "$(dirname "$0")/..")"
echo "Installing development dependencies..."
uv pip install \
-e . \
-r requirements_test.txt \
-r requirements_test_all.txt \
colorlog \
--constraint homeassistant/package_constraints.txt \
--upgrade \
--config-settings editable_mode=compat

View File

@@ -857,7 +857,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [
"sesame",
"seven_segments",
"seventeentrack",
"sfr_box",
"sharkiq",
"shell_command",
"shodan",

View File

@@ -2,7 +2,14 @@
from unittest.mock import AsyncMock
from adguardhome.update import AdGuardHomeAvailableUpdate
from adguardhome import AdGuardHome
from adguardhome.filtering import AdGuardHomeFiltering
from adguardhome.parental import AdGuardHomeParental
from adguardhome.querylog import AdGuardHomeQueryLog
from adguardhome.safebrowsing import AdGuardHomeSafeBrowsing
from adguardhome.safesearch import AdGuardHomeSafeSearch
from adguardhome.stats import AdGuardHomeStats
from adguardhome.update import AdGuardHomeAvailableUpdate, AdGuardHomeUpdate
import pytest
from homeassistant.components.adguard import DOMAIN
@@ -38,7 +45,14 @@ def mock_config_entry() -> MockConfigEntry:
@pytest.fixture
async def mock_adguard() -> AsyncMock:
"""Fixture for setting up the component."""
adguard_mock = AsyncMock()
adguard_mock = AsyncMock(spec=AdGuardHome)
adguard_mock.filtering = AsyncMock(spec=AdGuardHomeFiltering)
adguard_mock.parental = AsyncMock(spec=AdGuardHomeParental)
adguard_mock.querylog = AsyncMock(spec=AdGuardHomeQueryLog)
adguard_mock.safebrowsing = AsyncMock(spec=AdGuardHomeSafeBrowsing)
adguard_mock.safesearch = AsyncMock(spec=AdGuardHomeSafeSearch)
adguard_mock.stats = AsyncMock(spec=AdGuardHomeStats)
adguard_mock.update = AsyncMock(spec=AdGuardHomeUpdate)
# static properties
adguard_mock.host = "127.0.0.1"
@@ -48,6 +62,10 @@ async def mock_adguard() -> AsyncMock:
# async method mocks
adguard_mock.version = AsyncMock(return_value="v0.107.50")
adguard_mock.protection_enabled = AsyncMock(return_value=True)
adguard_mock.parental.enabled = AsyncMock(return_value=True)
adguard_mock.safesearch.enabled = AsyncMock(return_value=True)
adguard_mock.safebrowsing.enabled = AsyncMock(return_value=True)
adguard_mock.stats.dns_queries = AsyncMock(return_value=666)
adguard_mock.stats.blocked_filtering = AsyncMock(return_value=1337)
adguard_mock.stats.blocked_percentage = AsyncMock(return_value=200.75)
@@ -56,11 +74,8 @@ async def mock_adguard() -> AsyncMock:
adguard_mock.stats.replaced_safesearch = AsyncMock(return_value=18)
adguard_mock.stats.avg_processing_time = AsyncMock(return_value=31.41)
adguard_mock.filtering.rules_count = AsyncMock(return_value=100)
adguard_mock.filtering.add_url = AsyncMock()
adguard_mock.filtering.remove_url = AsyncMock()
adguard_mock.filtering.enable_url = AsyncMock()
adguard_mock.filtering.disable_url = AsyncMock()
adguard_mock.filtering.refresh = AsyncMock()
adguard_mock.filtering.enabled = AsyncMock(return_value=True)
adguard_mock.querylog.enabled = AsyncMock(return_value=True)
adguard_mock.update.update_available = AsyncMock(
return_value=AdGuardHomeAvailableUpdate(
new_version="v0.107.59",
@@ -70,6 +85,5 @@ async def mock_adguard() -> AsyncMock:
disabled=False,
)
)
adguard_mock.update.begin_update = AsyncMock()
return adguard_mock
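
Aside (not part of the changeset): the value of the spec= pattern used in this fixture is that any attribute the real adguardhome classes do not define raises AttributeError instead of silently returning another mock, which is what allows the explicit per-method filtering mocks to be dropped. A minimal, hypothetical sketch of that behaviour:

# Hypothetical sketch of the spec= behaviour relied on above; "no_such_method"
# is an intentionally bogus name, the other names mirror the fixture.
from unittest.mock import AsyncMock

from adguardhome import AdGuardHome


def spec_mock_demo() -> None:
    """Show that spec= limits the mock to the real client API surface."""
    adguard = AsyncMock(spec=AdGuardHome)
    adguard.version.return_value = "v0.107.50"  # version() exists on the real client: allowed

    try:
        adguard.no_such_method()  # not defined on AdGuardHome: rejected by the spec
    except AttributeError as err:
        print(f"spec'd mock rejected unknown attribute: {err}")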

View File

@@ -0,0 +1,289 @@
# serializer version: 1
# name: test_switch[switch.adguard_home_filtering-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'switch',
'entity_category': None,
'entity_id': 'switch.adguard_home_filtering',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Filtering',
'platform': 'adguard',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'filtering',
'unique_id': 'adguard_127.0.0.1_3000_switch_filtering',
'unit_of_measurement': None,
})
# ---
# name: test_switch[switch.adguard_home_filtering-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'AdGuard Home Filtering',
}),
'context': <ANY>,
'entity_id': 'switch.adguard_home_filtering',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'on',
})
# ---
# name: test_switch[switch.adguard_home_parental_control-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'switch',
'entity_category': None,
'entity_id': 'switch.adguard_home_parental_control',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Parental control',
'platform': 'adguard',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'parental',
'unique_id': 'adguard_127.0.0.1_3000_switch_parental',
'unit_of_measurement': None,
})
# ---
# name: test_switch[switch.adguard_home_parental_control-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'AdGuard Home Parental control',
}),
'context': <ANY>,
'entity_id': 'switch.adguard_home_parental_control',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'on',
})
# ---
# name: test_switch[switch.adguard_home_protection-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'switch',
'entity_category': None,
'entity_id': 'switch.adguard_home_protection',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Protection',
'platform': 'adguard',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'protection',
'unique_id': 'adguard_127.0.0.1_3000_switch_protection',
'unit_of_measurement': None,
})
# ---
# name: test_switch[switch.adguard_home_protection-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'AdGuard Home Protection',
}),
'context': <ANY>,
'entity_id': 'switch.adguard_home_protection',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'on',
})
# ---
# name: test_switch[switch.adguard_home_query_log-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'switch',
'entity_category': None,
'entity_id': 'switch.adguard_home_query_log',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Query log',
'platform': 'adguard',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'query_log',
'unique_id': 'adguard_127.0.0.1_3000_switch_querylog',
'unit_of_measurement': None,
})
# ---
# name: test_switch[switch.adguard_home_query_log-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'AdGuard Home Query log',
}),
'context': <ANY>,
'entity_id': 'switch.adguard_home_query_log',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'on',
})
# ---
# name: test_switch[switch.adguard_home_safe_browsing-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'switch',
'entity_category': None,
'entity_id': 'switch.adguard_home_safe_browsing',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Safe browsing',
'platform': 'adguard',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'safe_browsing',
'unique_id': 'adguard_127.0.0.1_3000_switch_safebrowsing',
'unit_of_measurement': None,
})
# ---
# name: test_switch[switch.adguard_home_safe_browsing-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'AdGuard Home Safe browsing',
}),
'context': <ANY>,
'entity_id': 'switch.adguard_home_safe_browsing',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'on',
})
# ---
# name: test_switch[switch.adguard_home_safe_search-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'switch',
'entity_category': None,
'entity_id': 'switch.adguard_home_safe_search',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Safe search',
'platform': 'adguard',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'safe_search',
'unique_id': 'adguard_127.0.0.1_3000_switch_safesearch',
'unit_of_measurement': None,
})
# ---
# name: test_switch[switch.adguard_home_safe_search-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'AdGuard Home Safe search',
}),
'context': <ANY>,
'entity_id': 'switch.adguard_home_safe_search',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'on',
})
# ---

View File

@@ -0,0 +1,161 @@
"""Tests for the AdGuard Home switch entity."""
from collections.abc import Callable
import logging
from typing import Any
from unittest.mock import AsyncMock, patch
from adguardhome import AdGuardHomeError
import pytest
from syrupy.assertion import SnapshotAssertion
from homeassistant.components.switch import SERVICE_TURN_OFF, SERVICE_TURN_ON
from homeassistant.const import ATTR_ENTITY_ID, Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from . import setup_integration
from tests.common import MockConfigEntry, snapshot_platform
@pytest.mark.usefixtures("entity_registry_enabled_by_default")
async def test_switch(
hass: HomeAssistant,
entity_registry: er.EntityRegistry,
snapshot: SnapshotAssertion,
mock_adguard: AsyncMock,
mock_config_entry: MockConfigEntry,
) -> None:
"""Test the adguard switch platform."""
with patch("homeassistant.components.adguard.PLATFORMS", [Platform.SWITCH]):
await setup_integration(hass, mock_config_entry, mock_adguard)
await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id)
@pytest.mark.usefixtures("entity_registry_enabled_by_default")
@pytest.mark.parametrize(
("switch_name", "service", "call_assertion"),
[
(
"protection",
SERVICE_TURN_ON,
lambda mock: mock.enable_protection.assert_called_once(),
),
(
"protection",
SERVICE_TURN_OFF,
lambda mock: mock.disable_protection.assert_called_once(),
),
(
"parental_control",
SERVICE_TURN_ON,
lambda mock: mock.parental.enable.assert_called_once(),
),
(
"parental_control",
SERVICE_TURN_OFF,
lambda mock: mock.parental.disable.assert_called_once(),
),
(
"safe_search",
SERVICE_TURN_ON,
lambda mock: mock.safesearch.enable.assert_called_once(),
),
(
"safe_search",
SERVICE_TURN_OFF,
lambda mock: mock.safesearch.disable.assert_called_once(),
),
(
"safe_browsing",
SERVICE_TURN_ON,
lambda mock: mock.safebrowsing.enable.assert_called_once(),
),
(
"safe_browsing",
SERVICE_TURN_OFF,
lambda mock: mock.safebrowsing.disable.assert_called_once(),
),
(
"filtering",
SERVICE_TURN_ON,
lambda mock: mock.filtering.enable.assert_called_once(),
),
(
"filtering",
SERVICE_TURN_OFF,
lambda mock: mock.filtering.disable.assert_called_once(),
),
(
"query_log",
SERVICE_TURN_ON,
lambda mock: mock.querylog.enable.assert_called_once(),
),
(
"query_log",
SERVICE_TURN_OFF,
lambda mock: mock.querylog.disable.assert_called_once(),
),
],
)
async def test_switch_actions(
hass: HomeAssistant,
mock_adguard: AsyncMock,
mock_config_entry: MockConfigEntry,
switch_name: str,
service: str,
call_assertion: Callable[[AsyncMock], Any],
) -> None:
"""Test the adguard switch actions."""
with patch("homeassistant.components.adguard.PLATFORMS", [Platform.SWITCH]):
await setup_integration(hass, mock_config_entry, mock_adguard)
await hass.services.async_call(
"switch",
service,
{ATTR_ENTITY_ID: f"switch.adguard_home_{switch_name}"},
blocking=True,
)
call_assertion(mock_adguard)
@pytest.mark.parametrize(
("service", "expected_message"),
[
(
SERVICE_TURN_ON,
"An error occurred while turning on AdGuard Home switch",
),
(
SERVICE_TURN_OFF,
"An error occurred while turning off AdGuard Home switch",
),
],
)
async def test_switch_action_failed(
hass: HomeAssistant,
mock_adguard: AsyncMock,
mock_config_entry: MockConfigEntry,
caplog: pytest.LogCaptureFixture,
service: str,
expected_message: str,
) -> None:
"""Test the adguard switch actions."""
caplog.set_level(logging.ERROR)
with patch("homeassistant.components.adguard.PLATFORMS", [Platform.SWITCH]):
await setup_integration(hass, mock_config_entry, mock_adguard)
mock_adguard.enable_protection.side_effect = AdGuardHomeError("Boom")
mock_adguard.disable_protection.side_effect = AdGuardHomeError("Boom")
await hass.services.async_call(
"switch",
service,
{ATTR_ENTITY_ID: "switch.adguard_home_protection"},
blocking=True,
)
assert expected_message in caplog.text

View File

@@ -168,6 +168,7 @@ TEST_PLAYBACK_METADATA = PlaybackContentMetadata(
title="Test title",
total_duration_seconds=123,
track=1,
source_internal_id="123",
)
TEST_PLAYBACK_ERROR = PlaybackError(error="Test error")
TEST_PLAYBACK_PROGRESS = PlaybackProgress(progress=123)

View File

@@ -2,16 +2,16 @@
# name: test_async_beolink_allstandby
StateSnapshot({
'attributes': ReadOnlyDict({
'beolink': dict({
'listeners': dict({
<BangOlufsenAttribute.BEOLINK: 'beolink'>: dict({
<BangOlufsenAttribute.BEOLINK_LISTENERS: 'listeners'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
}),
'peers': dict({
<BangOlufsenAttribute.BEOLINK_PEERS: 'peers'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
}),
'self': dict({
<BangOlufsenAttribute.BEOLINK_SELF: 'self'>: dict({
'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com',
}),
}),
@@ -50,16 +50,16 @@
# name: test_async_beolink_expand[all_discovered-True-None-log_messages0-3]
StateSnapshot({
'attributes': ReadOnlyDict({
'beolink': dict({
'listeners': dict({
<BangOlufsenAttribute.BEOLINK: 'beolink'>: dict({
<BangOlufsenAttribute.BEOLINK_LISTENERS: 'listeners'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
}),
'peers': dict({
<BangOlufsenAttribute.BEOLINK_PEERS: 'peers'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
}),
'self': dict({
<BangOlufsenAttribute.BEOLINK_SELF: 'self'>: dict({
'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com',
}),
}),
@@ -71,7 +71,7 @@
'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com',
'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com',
]),
'media_content_type': <MediaType.MUSIC: 'music'>,
'media_content_type': <BangOlufsenMediaType.TIDAL: 'tidal'>,
'repeat': <RepeatMode.OFF: 'off'>,
'shuffle': False,
'sound_mode': 'Test Listening Mode (123)',
@@ -99,16 +99,16 @@
# name: test_async_beolink_expand[all_discovered-True-expand_side_effect1-log_messages1-3]
StateSnapshot({
'attributes': ReadOnlyDict({
'beolink': dict({
'listeners': dict({
<BangOlufsenAttribute.BEOLINK: 'beolink'>: dict({
<BangOlufsenAttribute.BEOLINK_LISTENERS: 'listeners'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
}),
'peers': dict({
<BangOlufsenAttribute.BEOLINK_PEERS: 'peers'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
}),
'self': dict({
<BangOlufsenAttribute.BEOLINK_SELF: 'self'>: dict({
'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com',
}),
}),
@@ -120,7 +120,7 @@
'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com',
'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com',
]),
'media_content_type': <MediaType.MUSIC: 'music'>,
'media_content_type': <BangOlufsenMediaType.TIDAL: 'tidal'>,
'repeat': <RepeatMode.OFF: 'off'>,
'shuffle': False,
'sound_mode': 'Test Listening Mode (123)',
@@ -148,16 +148,16 @@
# name: test_async_beolink_expand[beolink_jids-parameter_value2-None-log_messages2-2]
StateSnapshot({
'attributes': ReadOnlyDict({
'beolink': dict({
'listeners': dict({
<BangOlufsenAttribute.BEOLINK: 'beolink'>: dict({
<BangOlufsenAttribute.BEOLINK_LISTENERS: 'listeners'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
}),
'peers': dict({
<BangOlufsenAttribute.BEOLINK_PEERS: 'peers'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
}),
'self': dict({
<BangOlufsenAttribute.BEOLINK_SELF: 'self'>: dict({
'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com',
}),
}),
@@ -169,7 +169,7 @@
'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com',
'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com',
]),
'media_content_type': <MediaType.MUSIC: 'music'>,
'media_content_type': <BangOlufsenMediaType.TIDAL: 'tidal'>,
'repeat': <RepeatMode.OFF: 'off'>,
'shuffle': False,
'sound_mode': 'Test Listening Mode (123)',
@@ -197,16 +197,16 @@
# name: test_async_beolink_expand[beolink_jids-parameter_value3-expand_side_effect3-log_messages3-2]
StateSnapshot({
'attributes': ReadOnlyDict({
'beolink': dict({
'listeners': dict({
<BangOlufsenAttribute.BEOLINK: 'beolink'>: dict({
<BangOlufsenAttribute.BEOLINK_LISTENERS: 'listeners'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
}),
'peers': dict({
<BangOlufsenAttribute.BEOLINK_PEERS: 'peers'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
}),
'self': dict({
<BangOlufsenAttribute.BEOLINK_SELF: 'self'>: dict({
'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com',
}),
}),
@@ -218,7 +218,7 @@
'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com',
'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com',
]),
'media_content_type': <MediaType.MUSIC: 'music'>,
'media_content_type': <BangOlufsenMediaType.TIDAL: 'tidal'>,
'repeat': <RepeatMode.OFF: 'off'>,
'shuffle': False,
'sound_mode': 'Test Listening Mode (123)',
@@ -246,16 +246,16 @@
# name: test_async_beolink_join[service_parameters0-method_parameters0]
StateSnapshot({
'attributes': ReadOnlyDict({
'beolink': dict({
'listeners': dict({
<BangOlufsenAttribute.BEOLINK: 'beolink'>: dict({
<BangOlufsenAttribute.BEOLINK_LISTENERS: 'listeners'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
}),
'peers': dict({
<BangOlufsenAttribute.BEOLINK_PEERS: 'peers'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
}),
'self': dict({
<BangOlufsenAttribute.BEOLINK_SELF: 'self'>: dict({
'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com',
}),
}),
@@ -294,16 +294,16 @@
# name: test_async_beolink_join[service_parameters1-method_parameters1]
StateSnapshot({
'attributes': ReadOnlyDict({
'beolink': dict({
'listeners': dict({
<BangOlufsenAttribute.BEOLINK: 'beolink'>: dict({
<BangOlufsenAttribute.BEOLINK_LISTENERS: 'listeners'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
}),
'peers': dict({
<BangOlufsenAttribute.BEOLINK_PEERS: 'peers'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
}),
'self': dict({
<BangOlufsenAttribute.BEOLINK_SELF: 'self'>: dict({
'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com',
}),
}),
@@ -342,16 +342,16 @@
# name: test_async_beolink_join[service_parameters2-method_parameters2]
StateSnapshot({
'attributes': ReadOnlyDict({
'beolink': dict({
'listeners': dict({
<BangOlufsenAttribute.BEOLINK: 'beolink'>: dict({
<BangOlufsenAttribute.BEOLINK_LISTENERS: 'listeners'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
}),
'peers': dict({
<BangOlufsenAttribute.BEOLINK_PEERS: 'peers'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
}),
'self': dict({
<BangOlufsenAttribute.BEOLINK_SELF: 'self'>: dict({
'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com',
}),
}),
@@ -390,16 +390,16 @@
# name: test_async_beolink_join_invalid[service_parameters0-expected_result0]
StateSnapshot({
'attributes': ReadOnlyDict({
'beolink': dict({
'listeners': dict({
<BangOlufsenAttribute.BEOLINK: 'beolink'>: dict({
<BangOlufsenAttribute.BEOLINK_LISTENERS: 'listeners'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
}),
'peers': dict({
<BangOlufsenAttribute.BEOLINK_PEERS: 'peers'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
}),
'self': dict({
<BangOlufsenAttribute.BEOLINK_SELF: 'self'>: dict({
'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com',
}),
}),
@@ -438,16 +438,16 @@
# name: test_async_beolink_join_invalid[service_parameters1-expected_result1]
StateSnapshot({
'attributes': ReadOnlyDict({
'beolink': dict({
'listeners': dict({
<BangOlufsenAttribute.BEOLINK: 'beolink'>: dict({
<BangOlufsenAttribute.BEOLINK_LISTENERS: 'listeners'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
}),
'peers': dict({
<BangOlufsenAttribute.BEOLINK_PEERS: 'peers'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
}),
'self': dict({
<BangOlufsenAttribute.BEOLINK_SELF: 'self'>: dict({
'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com',
}),
}),
@@ -486,16 +486,16 @@
# name: test_async_beolink_join_invalid[service_parameters2-expected_result2]
StateSnapshot({
'attributes': ReadOnlyDict({
'beolink': dict({
'listeners': dict({
<BangOlufsenAttribute.BEOLINK: 'beolink'>: dict({
<BangOlufsenAttribute.BEOLINK_LISTENERS: 'listeners'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
}),
'peers': dict({
<BangOlufsenAttribute.BEOLINK_PEERS: 'peers'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
}),
'self': dict({
<BangOlufsenAttribute.BEOLINK_SELF: 'self'>: dict({
'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com',
}),
}),
@@ -534,16 +534,16 @@
# name: test_async_beolink_unexpand
StateSnapshot({
'attributes': ReadOnlyDict({
'beolink': dict({
'listeners': dict({
<BangOlufsenAttribute.BEOLINK: 'beolink'>: dict({
<BangOlufsenAttribute.BEOLINK_LISTENERS: 'listeners'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
}),
'peers': dict({
<BangOlufsenAttribute.BEOLINK_PEERS: 'peers'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
}),
'self': dict({
<BangOlufsenAttribute.BEOLINK_SELF: 'self'>: dict({
'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com',
}),
}),
@@ -582,16 +582,16 @@
# name: test_async_join_players[group_members0-1-0]
StateSnapshot({
'attributes': ReadOnlyDict({
'beolink': dict({
'listeners': dict({
<BangOlufsenAttribute.BEOLINK: 'beolink'>: dict({
<BangOlufsenAttribute.BEOLINK_LISTENERS: 'listeners'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
}),
'peers': dict({
<BangOlufsenAttribute.BEOLINK_PEERS: 'peers'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
}),
'self': dict({
<BangOlufsenAttribute.BEOLINK_SELF: 'self'>: dict({
'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com',
}),
}),
@@ -603,7 +603,7 @@
'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com',
'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com',
]),
'media_content_type': <MediaType.MUSIC: 'music'>,
'media_content_type': <BangOlufsenMediaType.TIDAL: 'tidal'>,
'repeat': <RepeatMode.OFF: 'off'>,
'shuffle': False,
'sound_mode': 'Test Listening Mode (123)',
@@ -631,16 +631,16 @@
# name: test_async_join_players[group_members0-1-0].1
StateSnapshot({
'attributes': ReadOnlyDict({
'beolink': dict({
'listeners': dict({
<BangOlufsenAttribute.BEOLINK: 'beolink'>: dict({
<BangOlufsenAttribute.BEOLINK_LISTENERS: 'listeners'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
}),
'peers': dict({
<BangOlufsenAttribute.BEOLINK_PEERS: 'peers'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
}),
'self': dict({
<BangOlufsenAttribute.BEOLINK_SELF: 'self'>: dict({
'Living room Balance': '1111.1111111.22222222@products.bang-olufsen.com',
}),
}),
@@ -680,16 +680,16 @@
# name: test_async_join_players[group_members1-0-1]
StateSnapshot({
'attributes': ReadOnlyDict({
'beolink': dict({
'listeners': dict({
<BangOlufsenAttribute.BEOLINK: 'beolink'>: dict({
<BangOlufsenAttribute.BEOLINK_LISTENERS: 'listeners'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
}),
'peers': dict({
<BangOlufsenAttribute.BEOLINK_PEERS: 'peers'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
}),
'self': dict({
<BangOlufsenAttribute.BEOLINK_SELF: 'self'>: dict({
'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com',
}),
}),
@@ -701,7 +701,7 @@
'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com',
'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com',
]),
'media_content_type': <MediaType.MUSIC: 'music'>,
'media_content_type': <BangOlufsenMediaType.TIDAL: 'tidal'>,
'repeat': <RepeatMode.OFF: 'off'>,
'shuffle': False,
'sound_mode': 'Test Listening Mode (123)',
@@ -729,16 +729,16 @@
# name: test_async_join_players[group_members1-0-1].1
StateSnapshot({
'attributes': ReadOnlyDict({
'beolink': dict({
'listeners': dict({
<BangOlufsenAttribute.BEOLINK: 'beolink'>: dict({
<BangOlufsenAttribute.BEOLINK_LISTENERS: 'listeners'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
}),
'peers': dict({
<BangOlufsenAttribute.BEOLINK_PEERS: 'peers'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
}),
'self': dict({
<BangOlufsenAttribute.BEOLINK_SELF: 'self'>: dict({
'Living room Balance': '1111.1111111.22222222@products.bang-olufsen.com',
}),
}),
@@ -778,16 +778,16 @@
# name: test_async_join_players_invalid[source0-group_members0-expected_result0-invalid_source]
StateSnapshot({
'attributes': ReadOnlyDict({
'beolink': dict({
'listeners': dict({
<BangOlufsenAttribute.BEOLINK: 'beolink'>: dict({
<BangOlufsenAttribute.BEOLINK_LISTENERS: 'listeners'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
}),
'peers': dict({
<BangOlufsenAttribute.BEOLINK_PEERS: 'peers'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
}),
'self': dict({
<BangOlufsenAttribute.BEOLINK_SELF: 'self'>: dict({
'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com',
}),
}),
@@ -828,16 +828,16 @@
# name: test_async_join_players_invalid[source0-group_members0-expected_result0-invalid_source].1
StateSnapshot({
'attributes': ReadOnlyDict({
'beolink': dict({
'listeners': dict({
<BangOlufsenAttribute.BEOLINK: 'beolink'>: dict({
<BangOlufsenAttribute.BEOLINK_LISTENERS: 'listeners'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
}),
'peers': dict({
<BangOlufsenAttribute.BEOLINK_PEERS: 'peers'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
}),
'self': dict({
<BangOlufsenAttribute.BEOLINK_SELF: 'self'>: dict({
'Living room Balance': '1111.1111111.22222222@products.bang-olufsen.com',
}),
}),
@@ -877,16 +877,16 @@
# name: test_async_join_players_invalid[source1-group_members1-expected_result1-invalid_grouping_entity]
StateSnapshot({
'attributes': ReadOnlyDict({
'beolink': dict({
'listeners': dict({
<BangOlufsenAttribute.BEOLINK: 'beolink'>: dict({
<BangOlufsenAttribute.BEOLINK_LISTENERS: 'listeners'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
}),
'peers': dict({
<BangOlufsenAttribute.BEOLINK_PEERS: 'peers'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
}),
'self': dict({
<BangOlufsenAttribute.BEOLINK_SELF: 'self'>: dict({
'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com',
}),
}),
@@ -898,7 +898,7 @@
'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com',
'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com',
]),
'media_content_type': <MediaType.MUSIC: 'music'>,
'media_content_type': <BangOlufsenMediaType.TIDAL: 'tidal'>,
'repeat': <RepeatMode.OFF: 'off'>,
'shuffle': False,
'sound_mode': 'Test Listening Mode (123)',
@@ -926,16 +926,16 @@
# name: test_async_join_players_invalid[source1-group_members1-expected_result1-invalid_grouping_entity].1
StateSnapshot({
'attributes': ReadOnlyDict({
'beolink': dict({
'listeners': dict({
<BangOlufsenAttribute.BEOLINK: 'beolink'>: dict({
<BangOlufsenAttribute.BEOLINK_LISTENERS: 'listeners'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
}),
'peers': dict({
<BangOlufsenAttribute.BEOLINK_PEERS: 'peers'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
}),
'self': dict({
<BangOlufsenAttribute.BEOLINK_SELF: 'self'>: dict({
'Living room Balance': '1111.1111111.22222222@products.bang-olufsen.com',
}),
}),
@@ -975,16 +975,16 @@
# name: test_async_unjoin_player
StateSnapshot({
'attributes': ReadOnlyDict({
'beolink': dict({
'listeners': dict({
<BangOlufsenAttribute.BEOLINK: 'beolink'>: dict({
<BangOlufsenAttribute.BEOLINK_LISTENERS: 'listeners'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
}),
'peers': dict({
<BangOlufsenAttribute.BEOLINK_PEERS: 'peers'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
}),
'self': dict({
<BangOlufsenAttribute.BEOLINK_SELF: 'self'>: dict({
'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com',
}),
}),
@@ -1023,15 +1023,15 @@
# name: test_async_update_beolink_listener
StateSnapshot({
'attributes': ReadOnlyDict({
'beolink': dict({
'leader': dict({
<BangOlufsenAttribute.BEOLINK: 'beolink'>: dict({
<BangOlufsenAttribute.BEOLINK_LEADER: 'leader'>: dict({
'Laundry room Core': '1111.1111111.22222222@products.bang-olufsen.com',
}),
'peers': dict({
<BangOlufsenAttribute.BEOLINK_PEERS: 'peers'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
}),
'self': dict({
<BangOlufsenAttribute.BEOLINK_SELF: 'self'>: dict({
'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com',
}),
}),
@@ -1069,16 +1069,16 @@
# name: test_async_update_beolink_listener.1
StateSnapshot({
'attributes': ReadOnlyDict({
'beolink': dict({
'listeners': dict({
<BangOlufsenAttribute.BEOLINK: 'beolink'>: dict({
<BangOlufsenAttribute.BEOLINK_LISTENERS: 'listeners'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
}),
'peers': dict({
<BangOlufsenAttribute.BEOLINK_PEERS: 'peers'>: dict({
'Bedroom Premiere': '1111.1111111.33333333@products.bang-olufsen.com',
'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com',
}),
'self': dict({
<BangOlufsenAttribute.BEOLINK_SELF: 'self'>: dict({
'Living room Balance': '1111.1111111.22222222@products.bang-olufsen.com',
}),
}),

View File

@@ -24,6 +24,7 @@ from homeassistant.components.bang_olufsen.const import (
BANG_OLUFSEN_REPEAT_FROM_HA,
BANG_OLUFSEN_STATES,
DOMAIN,
BangOlufsenMediaType,
BangOlufsenSource,
)
from homeassistant.components.media_player import (
@@ -260,6 +261,7 @@ async def test_async_update_playback_metadata(
assert ATTR_MEDIA_ALBUM_ARTIST not in states.attributes
assert ATTR_MEDIA_TRACK not in states.attributes
assert ATTR_MEDIA_CHANNEL not in states.attributes
assert ATTR_MEDIA_CONTENT_ID not in states.attributes
# Send the WebSocket event dispatch
playback_metadata_callback(TEST_PLAYBACK_METADATA)
@@ -276,6 +278,12 @@ async def test_async_update_playback_metadata(
)
assert states.attributes[ATTR_MEDIA_TRACK] == TEST_PLAYBACK_METADATA.track
assert states.attributes[ATTR_MEDIA_CHANNEL] == TEST_PLAYBACK_METADATA.organization
assert (
states.attributes[ATTR_MEDIA_CONTENT_ID]
== TEST_PLAYBACK_METADATA.source_internal_id
)
assert states.attributes[ATTR_MEDIA_CONTENT_TYPE] == MediaType.MUSIC
async def test_async_update_playback_error(
@@ -342,28 +350,47 @@ async def test_async_update_playback_state(
@pytest.mark.parametrize(
("source", "content_type", "progress", "metadata"),
("source", "content_type", "progress", "metadata", "content_id_available"),
[
# Normal source, music mediatype expected
(
TEST_SOURCE,
MediaType.MUSIC,
TEST_PLAYBACK_PROGRESS.progress,
PlaybackContentMetadata(),
),
# URI source, url media type expected
(
BangOlufsenSource.URI_STREAMER,
MediaType.URL,
TEST_PLAYBACK_PROGRESS.progress,
PlaybackContentMetadata(),
False,
),
# Line-In source,media type expected, progress 0 expected
# Line-In source, music media type expected, progress 0 expected
(
BangOlufsenSource.LINE_IN,
MediaType.MUSIC,
0,
PlaybackContentMetadata(),
False,
),
# Tidal source, tidal media type expected, media content id expected
(
BangOlufsenSource.TIDAL,
BangOlufsenMediaType.TIDAL,
TEST_PLAYBACK_PROGRESS.progress,
PlaybackContentMetadata(source_internal_id="123"),
True,
),
# Deezer source, deezer media type expected, media content id expected
(
BangOlufsenSource.DEEZER,
BangOlufsenMediaType.DEEZER,
TEST_PLAYBACK_PROGRESS.progress,
PlaybackContentMetadata(source_internal_id="123"),
True,
),
# Radio source, radio media type expected, media content id expected
(
BangOlufsenSource.NET_RADIO,
BangOlufsenMediaType.RADIO,
TEST_PLAYBACK_PROGRESS.progress,
PlaybackContentMetadata(source_internal_id="123"),
True,
),
],
)
@@ -375,6 +402,7 @@ async def test_async_update_source_change(
content_type: MediaType,
progress: int,
metadata: PlaybackContentMetadata,
content_id_available: bool,
) -> None:
"""Test _async_update_source_change."""
playback_progress_callback = (
@@ -402,6 +430,7 @@ async def test_async_update_source_change(
assert states.attributes[ATTR_INPUT_SOURCE] == source.name
assert states.attributes[ATTR_MEDIA_CONTENT_TYPE] == content_type
assert states.attributes[ATTR_MEDIA_POSITION] == progress
assert (ATTR_MEDIA_CONTENT_ID in states.attributes) == content_id_available
async def test_async_turn_off(

View File

@@ -167,23 +167,28 @@ async def test_set_temperature(
assert hass.states.get("climate.l1_100").attributes[ATTR_TEMPERATURE] == 30
@pytest.mark.parametrize("target_fan_mode", FAN_MODES)
async def test_set_fan_mode(
hass: HomeAssistant,
load_int: ConfigEntry,
target_fan_mode: str,
) -> None:
"""Test the Coolmaster climate set fan mode."""
assert hass.states.get("climate.l1_100").attributes[ATTR_FAN_MODE] == FAN_LOW
await hass.services.async_call(
CLIMATE_DOMAIN,
SERVICE_SET_FAN_MODE,
{
ATTR_ENTITY_ID: "climate.l1_100",
ATTR_FAN_MODE: FAN_HIGH,
ATTR_FAN_MODE: target_fan_mode,
},
blocking=True,
)
await hass.async_block_till_done()
assert hass.states.get("climate.l1_100").attributes[ATTR_FAN_MODE] == FAN_HIGH
assert (
hass.states.get("climate.l1_100").attributes[ATTR_FAN_MODE] == target_fan_mode
)
async def test_set_swing_mode(

View File

@@ -20,6 +20,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.setup import async_setup_component
from tests.common import MockConfigEntry
from tests.test_util.aiohttp import AiohttpClientMocker
CLIENT_ID = "1234"
CLIENT_SECRET = "5678"
@@ -206,12 +207,13 @@ def mock_device_response() -> list[dict[str, Any]]:
@pytest.fixture(autouse=True)
def mock_devices(requests_mock: Mocker, devices_response: dict[str, Any]) -> None:
def mock_devices(
aioclient_mock: AiohttpClientMocker, devices_response: dict[str, Any]
) -> None:
"""Fixture to setup fake device responses."""
requests_mock.register_uri(
"GET",
aioclient_mock.get(
DEVICES_API_URL,
status_code=HTTPStatus.OK,
status=HTTPStatus.OK,
json=devices_response,
)
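
For reference, a hedged sketch of the AiohttpClientMocker pattern these Fitbit fixtures are being migrated to; the helper name and the DEVICES_API_URL value below are illustrative assumptions, while the aioclient_mock.get() and clear_requests() calls mirror the ones used elsewhere in this changeset:

# Illustrative only: queueing responses with AiohttpClientMocker, mirroring the
# fixture above. The URL constant is an assumed stand-in, not taken from the diff.
from http import HTTPStatus

from tests.test_util.aiohttp import AiohttpClientMocker

DEVICES_API_URL = "https://api.fitbit.com/1/user/-/devices.json"  # assumed value


def register_device_responses(aioclient_mock: AiohttpClientMocker) -> None:
    """Register a successful device response, then swap in a server error."""
    aioclient_mock.get(DEVICES_API_URL, status=HTTPStatus.OK, json=[])
    # A test that wants the next refresh to fail clears and re-registers:
    aioclient_mock.clear_requests()
    aioclient_mock.get(DEVICES_API_URL, status=HTTPStatus.INTERNAL_SERVER_ERROR)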

View File

@@ -4,7 +4,6 @@ from collections.abc import Awaitable, Callable
from http import HTTPStatus
import pytest
from requests_mock.mocker import Mocker
from homeassistant.components.fitbit.const import (
CONF_CLIENT_ID,
@@ -90,14 +89,18 @@ async def test_token_refresh_success(
assert await integration_setup()
assert config_entry.state is ConfigEntryState.LOADED
# Verify token request
assert len(aioclient_mock.mock_calls) == 1
# Verify token request and that the device API is called with new token
assert len(aioclient_mock.mock_calls) == 2
assert aioclient_mock.mock_calls[0][2] == {
CONF_CLIENT_ID: CLIENT_ID,
CONF_CLIENT_SECRET: CLIENT_SECRET,
"grant_type": "refresh_token",
"refresh_token": FAKE_REFRESH_TOKEN,
}
assert str(aioclient_mock.mock_calls[1][1]) == DEVICES_API_URL
assert aioclient_mock.mock_calls[1][3].get("Authorization") == (
"Bearer server-access-token"
)
# Verify updated token
assert (
@@ -144,15 +147,15 @@ async def test_device_update_coordinator_failure(
integration_setup: Callable[[], Awaitable[bool]],
config_entry: MockConfigEntry,
setup_credentials: None,
requests_mock: Mocker,
aioclient_mock: AiohttpClientMocker,
) -> None:
"""Test case where the device update coordinator fails on the first request."""
assert config_entry.state is ConfigEntryState.NOT_LOADED
requests_mock.register_uri(
"GET",
aioclient_mock.clear_requests()
aioclient_mock.get(
DEVICES_API_URL,
status_code=HTTPStatus.INTERNAL_SERVER_ERROR,
status=HTTPStatus.INTERNAL_SERVER_ERROR,
)
assert not await integration_setup()
@@ -164,15 +167,15 @@ async def test_device_update_coordinator_reauth(
integration_setup: Callable[[], Awaitable[bool]],
config_entry: MockConfigEntry,
setup_credentials: None,
requests_mock: Mocker,
aioclient_mock: AiohttpClientMocker,
) -> None:
"""Test case where the device update coordinator fails on the first request."""
assert config_entry.state is ConfigEntryState.NOT_LOADED
requests_mock.register_uri(
"GET",
aioclient_mock.clear_requests()
aioclient_mock.get(
DEVICES_API_URL,
status_code=HTTPStatus.UNAUTHORIZED,
status=HTTPStatus.UNAUTHORIZED,
json={
"errors": [{"errorType": "invalid_grant"}],
},

View File

@@ -29,6 +29,7 @@ from .conftest import (
)
from tests.common import MockConfigEntry
from tests.test_util.aiohttp import AiohttpClientMocker
DEVICE_RESPONSE_CHARGE_2 = {
"battery": "Medium",
@@ -736,31 +737,13 @@ async def test_device_battery_level_update_failed(
hass: HomeAssistant,
setup_credentials: None,
integration_setup: Callable[[], Awaitable[bool]],
requests_mock: Mocker,
aioclient_mock: AiohttpClientMocker,
) -> None:
"""Test API failure for a battery level sensor for devices."""
requests_mock.register_uri(
"GET",
aioclient_mock.clear_requests()
aioclient_mock.get(
DEVICES_API_URL,
[
{
"status_code": HTTPStatus.OK,
"json": [DEVICE_RESPONSE_CHARGE_2],
},
# Fail when requesting an update
{
"status_code": HTTPStatus.INTERNAL_SERVER_ERROR,
"json": {
"errors": [
{
"errorType": "request",
"message": "An error occurred",
}
]
},
},
],
json=[DEVICE_RESPONSE_CHARGE_2],
)
assert await integration_setup()
@@ -770,6 +753,19 @@ async def test_device_battery_level_update_failed(
assert state.state == "Medium"
# Request an update for the entity which will fail
aioclient_mock.clear_requests()
aioclient_mock.get(
DEVICES_API_URL,
status=HTTPStatus.INTERNAL_SERVER_ERROR,
json={
"errors": [
{
"errorType": "request",
"message": "An error occurred",
}
]
},
)
await async_update_entity(hass, "sensor.charge_2_battery")
await hass.async_block_till_done()
@@ -791,28 +787,15 @@ async def test_device_battery_level_reauth_required(
setup_credentials: None,
integration_setup: Callable[[], Awaitable[bool]],
config_entry: MockConfigEntry,
requests_mock: Mocker,
aioclient_mock: AiohttpClientMocker,
) -> None:
"""Test API failure requires reauth."""
requests_mock.register_uri(
"GET",
aioclient_mock.clear_requests()
aioclient_mock.get(
DEVICES_API_URL,
[
{
"status_code": HTTPStatus.OK,
"json": [DEVICE_RESPONSE_CHARGE_2],
},
# Fail when requesting an update
{
"status_code": HTTPStatus.UNAUTHORIZED,
"json": {
"errors": [{"errorType": "invalid_grant"}],
},
},
],
json=[DEVICE_RESPONSE_CHARGE_2],
)
assert await integration_setup()
state = hass.states.get("sensor.charge_2_battery")
@@ -820,6 +803,14 @@ async def test_device_battery_level_reauth_required(
assert state.state == "Medium"
# Request an update for the entity which will fail
aioclient_mock.clear_requests()
aioclient_mock.get(
DEVICES_API_URL,
status=HTTPStatus.UNAUTHORIZED,
json={
"errors": [{"errorType": "invalid_grant"}],
},
)
await async_update_entity(hass, "sensor.charge_2_battery")
await hass.async_block_till_done()

View File

@@ -3,7 +3,7 @@
from datetime import timedelta
from unittest.mock import AsyncMock, Mock, call, patch
from aioshelly.const import MODEL_BULB, MODEL_BUTTON1
from aioshelly.const import MODEL_2PM_G3, MODEL_BULB, MODEL_BUTTON1
from aioshelly.exceptions import DeviceConnectionError, InvalidAuthError
from freezegun.api import FrozenDateTimeFactory
import pytest
@@ -29,6 +29,8 @@ from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState
from homeassistant.const import ATTR_DEVICE_ID, STATE_ON, STATE_UNAVAILABLE
from homeassistant.core import Event, HomeAssistant, State
from homeassistant.helpers import device_registry as dr, issue_registry as ir
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceRegistry
from homeassistant.helpers.entity_registry import EntityRegistry
from . import (
MOCK_MAC,
@@ -40,7 +42,11 @@ from . import (
register_entity,
)
from tests.common import async_fire_time_changed, mock_restore_cache
from tests.common import (
async_fire_time_changed,
async_load_json_object_fixture,
mock_restore_cache,
)
RELAY_BLOCK_ID = 0
LIGHT_BLOCK_ID = 2
@@ -927,6 +933,7 @@ async def test_rpc_runs_connected_events_when_initialized(
hass: HomeAssistant,
mock_rpc_device: Mock,
monkeypatch: pytest.MonkeyPatch,
caplog: pytest.LogCaptureFixture,
supports_scripts: bool,
zigbee_firmware: bool,
result: bool,
@@ -950,6 +957,13 @@ async def test_rpc_runs_connected_events_when_initialized(
# BLE script list is called during connected events if device supports scripts
# and Zigbee is disabled
assert bool(call.script_list() in mock_rpc_device.mock_calls) == result
assert "Device Test name already connected" not in caplog.text
# Mock initialized event after already initialized
caplog.clear()
mock_rpc_device.mock_initialized()
await hass.async_block_till_done()
assert "Device Test name already connected" in caplog.text
async def test_rpc_sleeping_device_unload_ignore_ble_scanner(
@@ -1139,3 +1153,70 @@ async def test_xmod_model_lookup(
)
assert device
assert device.model == xmod_model
async def test_sub_device_area_from_main_device(
hass: HomeAssistant,
mock_rpc_device: Mock,
entity_registry: EntityRegistry,
device_registry: DeviceRegistry,
monkeypatch: pytest.MonkeyPatch,
) -> None:
"""Test Shelly sub-device area is set to main device area when created."""
device_fixture = await async_load_json_object_fixture(hass, "2pm_gen3.json", DOMAIN)
monkeypatch.setattr(mock_rpc_device, "shelly", device_fixture["shelly"])
monkeypatch.setattr(mock_rpc_device, "status", device_fixture["status"])
monkeypatch.setattr(mock_rpc_device, "config", device_fixture["config"])
config_entry = await init_integration(
hass, gen=3, model=MODEL_2PM_G3, skip_setup=True
)
# create main device and set area
device_entry = device_registry.async_get_or_create(
config_entry_id=config_entry.entry_id,
name="Test name",
connections={(CONNECTION_NETWORK_MAC, MOCK_MAC)},
identifiers={(DOMAIN, MOCK_MAC)},
suggested_area="living_room",
)
await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
# verify sub-devices have the same area as main device
for relay_index in range(2):
entity_id = f"switch.test_name_switch_{relay_index}"
assert hass.states.get(entity_id) is not None
entry = entity_registry.async_get(entity_id)
assert entry
device_entry = device_registry.async_get(entry.device_id)
assert device_entry
assert device_entry.area_id == "living_room"
@pytest.mark.parametrize("restart_required", [True, False])
async def test_rpc_ble_scanner_enable_reboot(
hass: HomeAssistant,
mock_rpc_device,
monkeypatch: pytest.MonkeyPatch,
caplog: pytest.LogCaptureFixture,
restart_required: bool,
) -> None:
"""Test RPC BLE scanner enabling requires reboot."""
monkeypatch.setattr(
mock_rpc_device,
"ble_getconfig",
AsyncMock(return_value={"enable": False}),
)
monkeypatch.setattr(
mock_rpc_device,
"ble_setconfig",
AsyncMock(return_value={"restart_required": restart_required}),
)
await init_integration(
hass, 2, options={CONF_BLE_SCANNER_MODE: BLEScannerMode.ACTIVE}
)
assert bool("BLE enable required a reboot" in caplog.text) == restart_required
assert mock_rpc_device.trigger_reboot.call_count == int(restart_required)

View File

@@ -1,5 +1,18 @@
"""Tests for Transmission."""
from homeassistant.core import HomeAssistant
from tests.common import MockConfigEntry
async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None:
"""Fixture for setting up the component."""
config_entry.add_to_hass(hass)
await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
OLD_MOCK_CONFIG_DATA = {
"name": "Transmission",
"host": "0.0.0.0",

View File

@@ -0,0 +1,103 @@
"""Transmission tests configuration."""
from collections.abc import Generator
from datetime import UTC, datetime
from unittest.mock import AsyncMock, patch
import pytest
from transmission_rpc.session import Session, SessionStats
from transmission_rpc.torrent import Torrent
from homeassistant.components.transmission.const import DOMAIN
from . import MOCK_CONFIG_DATA
from tests.common import MockConfigEntry
@pytest.fixture
def mock_setup_entry() -> Generator[AsyncMock]:
"""Override async_setup_entry."""
with patch(
"homeassistant.components.transmission.async_setup_entry",
return_value=True,
) as mock_setup_entry:
yield mock_setup_entry
@pytest.fixture
def mock_config_entry() -> MockConfigEntry:
"""Mock a config entry."""
return MockConfigEntry(
domain=DOMAIN,
title="Transmission",
data=MOCK_CONFIG_DATA,
entry_id="01J0BC4QM2YBRP6H5G933AETT7",
)
@pytest.fixture
def mock_transmission_client() -> Generator[AsyncMock]:
"""Mock a Transmission client."""
with (
patch(
"homeassistant.components.transmission.transmission_rpc.Client",
autospec=False,
) as mock_client_class,
):
client = mock_client_class.return_value
client.server_version = "4.0.5 (a6fe2a64aa)"
session_stats_data = {
"uploadSpeed": 1,
"downloadSpeed": 1,
"activeTorrentCount": 0,
"pausedTorrentCount": 0,
"torrentCount": 0,
}
client.session_stats.return_value = SessionStats(fields=session_stats_data)
session_data = {"alt-speed-enabled": False}
client.get_session.return_value = Session(fields=session_data)
client.get_torrents.return_value = []
yield mock_client_class
@pytest.fixture
def mock_torrent():
"""Fixture that returns a factory function to create mock torrents."""
def _create_mock_torrent(
torrent_id: int = 1,
name: str = "Test Torrent",
percent_done: float = 0.5,
status: int = 4,
download_dir: str = "/downloads",
eta: int = 3600,
added_date: datetime | None = None,
ratio: float = 1.5,
) -> Torrent:
"""Create a mock torrent with all required attributes."""
if added_date is None:
added_date = datetime(2025, 11, 26, 14, 18, 0, tzinfo=UTC)
torrent_data = {
"id": torrent_id,
"name": name,
"percentDone": percent_done,
"status": status,
"rateDownload": 0,
"rateUpload": 0,
"downloadDir": download_dir,
"eta": eta,
"addedDate": int(added_date.timestamp()),
"uploadRatio": ratio,
"error": 0,
"errorString": "",
}
return Torrent(fields=torrent_data)
return _create_mock_torrent
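
A possible usage of the mock_torrent factory above, sketched for illustration; the test name, the torrent names, and the attribute checked are assumptions, not part of this changeset:

# Hypothetical usage of the mock_torrent factory together with the mocked
# Transmission client; only .name is inspected to keep the sketch minimal.
from unittest.mock import AsyncMock


def test_torrent_factory_example(
    mock_transmission_client: AsyncMock, mock_torrent
) -> None:
    """Feed two fake torrents to the mocked client and read them back."""
    client = mock_transmission_client.return_value
    client.get_torrents.return_value = [
        mock_torrent(torrent_id=1, name="distro.iso", percent_done=1.0, status=6),
        mock_torrent(torrent_id=2, name="dataset.tar", percent_done=0.25),
    ]
    assert [t.name for t in client.get_torrents()] == ["distro.iso", "dataset.tar"]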

View File

@@ -0,0 +1,430 @@
# serializer version: 1
# name: test_sensors[sensor.transmission_active_torrents-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.transmission_active_torrents',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Active torrents',
'platform': 'transmission',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'active_torrents',
'unique_id': '01J0BC4QM2YBRP6H5G933AETT7-active_torrents',
'unit_of_measurement': 'torrents',
})
# ---
# name: test_sensors[sensor.transmission_active_torrents-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Transmission Active torrents',
'torrent_info': dict({
}),
'unit_of_measurement': 'torrents',
}),
'context': <ANY>,
'entity_id': 'sensor.transmission_active_torrents',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '0',
})
# ---
# name: test_sensors[sensor.transmission_completed_torrents-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.transmission_completed_torrents',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Completed torrents',
'platform': 'transmission',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'completed_torrents',
'unique_id': '01J0BC4QM2YBRP6H5G933AETT7-completed_torrents',
'unit_of_measurement': 'torrents',
})
# ---
# name: test_sensors[sensor.transmission_completed_torrents-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Transmission Completed torrents',
'torrent_info': dict({
}),
'unit_of_measurement': 'torrents',
}),
'context': <ANY>,
'entity_id': 'sensor.transmission_completed_torrents',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '0',
})
# ---
# name: test_sensors[sensor.transmission_download_speed-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.transmission_download_speed',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
'sensor': dict({
'suggested_display_precision': 2,
}),
'sensor.private': dict({
'suggested_unit_of_measurement': <UnitOfDataRate.MEGABYTES_PER_SECOND: 'MB/s'>,
}),
}),
'original_device_class': <SensorDeviceClass.DATA_RATE: 'data_rate'>,
'original_icon': None,
'original_name': 'Download speed',
'platform': 'transmission',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'download_speed',
'unique_id': '01J0BC4QM2YBRP6H5G933AETT7-download',
'unit_of_measurement': <UnitOfDataRate.MEGABYTES_PER_SECOND: 'MB/s'>,
})
# ---
# name: test_sensors[sensor.transmission_download_speed-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'data_rate',
'friendly_name': 'Transmission Download speed',
'unit_of_measurement': <UnitOfDataRate.MEGABYTES_PER_SECOND: 'MB/s'>,
}),
'context': <ANY>,
'entity_id': 'sensor.transmission_download_speed',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '1e-06',
})
# ---
# name: test_sensors[sensor.transmission_paused_torrents-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.transmission_paused_torrents',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Paused torrents',
'platform': 'transmission',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'paused_torrents',
'unique_id': '01J0BC4QM2YBRP6H5G933AETT7-paused_torrents',
'unit_of_measurement': 'torrents',
})
# ---
# name: test_sensors[sensor.transmission_paused_torrents-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Transmission Paused torrents',
'torrent_info': dict({
}),
'unit_of_measurement': 'torrents',
}),
'context': <ANY>,
'entity_id': 'sensor.transmission_paused_torrents',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '0',
})
# ---
# name: test_sensors[sensor.transmission_started_torrents-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.transmission_started_torrents',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Started torrents',
'platform': 'transmission',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'started_torrents',
'unique_id': '01J0BC4QM2YBRP6H5G933AETT7-started_torrents',
'unit_of_measurement': 'torrents',
})
# ---
# name: test_sensors[sensor.transmission_started_torrents-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Transmission Started torrents',
'torrent_info': dict({
}),
'unit_of_measurement': 'torrents',
}),
'context': <ANY>,
'entity_id': 'sensor.transmission_started_torrents',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '0',
})
# ---
# name: test_sensors[sensor.transmission_status-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'options': list([
'idle',
'up_down',
'seeding',
'downloading',
]),
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.transmission_status',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': <SensorDeviceClass.ENUM: 'enum'>,
'original_icon': None,
'original_name': 'Status',
'platform': 'transmission',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'transmission_status',
'unique_id': '01J0BC4QM2YBRP6H5G933AETT7-status',
'unit_of_measurement': None,
})
# ---
# name: test_sensors[sensor.transmission_status-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'enum',
'friendly_name': 'Transmission Status',
'options': list([
'idle',
'up_down',
'seeding',
'downloading',
]),
}),
'context': <ANY>,
'entity_id': 'sensor.transmission_status',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'up_down',
})
# ---
# name: test_sensors[sensor.transmission_total_torrents-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.transmission_total_torrents',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Total torrents',
'platform': 'transmission',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'total_torrents',
'unique_id': '01J0BC4QM2YBRP6H5G933AETT7-total_torrents',
'unit_of_measurement': 'torrents',
})
# ---
# name: test_sensors[sensor.transmission_total_torrents-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Transmission Total torrents',
'torrent_info': dict({
}),
'unit_of_measurement': 'torrents',
}),
'context': <ANY>,
'entity_id': 'sensor.transmission_total_torrents',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '0',
})
# ---
# name: test_sensors[sensor.transmission_upload_speed-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.transmission_upload_speed',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
'sensor': dict({
'suggested_display_precision': 2,
}),
'sensor.private': dict({
'suggested_unit_of_measurement': <UnitOfDataRate.MEGABYTES_PER_SECOND: 'MB/s'>,
}),
}),
'original_device_class': <SensorDeviceClass.DATA_RATE: 'data_rate'>,
'original_icon': None,
'original_name': 'Upload speed',
'platform': 'transmission',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'upload_speed',
'unique_id': '01J0BC4QM2YBRP6H5G933AETT7-upload',
'unit_of_measurement': <UnitOfDataRate.MEGABYTES_PER_SECOND: 'MB/s'>,
})
# ---
# name: test_sensors[sensor.transmission_upload_speed-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'data_rate',
'friendly_name': 'Transmission Upload speed',
'unit_of_measurement': <UnitOfDataRate.MEGABYTES_PER_SECOND: 'MB/s'>,
}),
'context': <ANY>,
'entity_id': 'sensor.transmission_upload_speed',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '1e-06',
})
# ---

View File

@@ -0,0 +1,97 @@
# serializer version: 1
# name: test_switches[switch.transmission_switch-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'switch',
'entity_category': None,
'entity_id': 'switch.transmission_switch',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Switch',
'platform': 'transmission',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'on_off',
'unique_id': '01J0BC4QM2YBRP6H5G933AETT7-on_off',
'unit_of_measurement': None,
})
# ---
# name: test_switches[switch.transmission_switch-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Transmission Switch',
}),
'context': <ANY>,
'entity_id': 'switch.transmission_switch',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'off',
})
# ---
# name: test_switches[switch.transmission_turtle_mode-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'switch',
'entity_category': None,
'entity_id': 'switch.transmission_turtle_mode',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Turtle mode',
'platform': 'transmission',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'turtle_mode',
'unique_id': '01J0BC4QM2YBRP6H5G933AETT7-turtle_mode',
'unit_of_measurement': None,
})
# ---
# name: test_switches[switch.transmission_turtle_mode-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Transmission Turtle mode',
}),
'context': <ANY>,
'entity_id': 'switch.transmission_turtle_mode',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'off',
})
# ---

View File

@@ -1,6 +1,6 @@
"""Tests for Transmission config flow."""
from unittest.mock import MagicMock, patch
from unittest.mock import AsyncMock, patch
import pytest
from transmission_rpc.error import (
@@ -15,34 +15,26 @@ from homeassistant.components.transmission.const import DOMAIN
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResultType
from . import MOCK_CONFIG_DATA
from . import MOCK_CONFIG_DATA, setup_integration
from tests.common import MockConfigEntry
@pytest.fixture(autouse=True)
def mock_api():
"""Mock an api."""
with patch("transmission_rpc.Client") as api:
yield api
async def test_form(hass: HomeAssistant) -> None:
"""Test we get the form."""
async def test_full_flow(
hass: HomeAssistant,
mock_transmission_client: AsyncMock,
mock_setup_entry: AsyncMock,
) -> None:
"""Test full flow."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] is FlowResultType.FORM
with patch(
"homeassistant.components.transmission.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
MOCK_CONFIG_DATA,
)
await hass.async_block_till_done()
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
MOCK_CONFIG_DATA,
)
assert len(mock_setup_entry.mock_calls) == 1
assert result["title"] == "Transmission"
@@ -52,10 +44,10 @@ async def test_form(hass: HomeAssistant) -> None:
async def test_device_already_configured(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
) -> None:
"""Test aborting if the device is already configured."""
entry = MockConfigEntry(domain=DOMAIN, data=MOCK_CONFIG_DATA)
entry.add_to_hass(hass)
mock_config_entry.add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
@@ -72,7 +64,10 @@ async def test_device_already_configured(
assert result["type"] is FlowResultType.ABORT
async def test_options(hass: HomeAssistant) -> None:
async def test_options(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
) -> None:
"""Test updating options."""
entry = MockConfigEntry(
domain=transmission.DOMAIN,
@@ -103,14 +98,15 @@ async def test_options(hass: HomeAssistant) -> None:
async def test_error_on_wrong_credentials(
hass: HomeAssistant, mock_api: MagicMock
hass: HomeAssistant,
mock_transmission_client: AsyncMock,
) -> None:
"""Test we handle invalid credentials."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
mock_api.side_effect = TransmissionAuthError()
mock_transmission_client.side_effect = TransmissionAuthError()
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
MOCK_CONFIG_DATA,
@@ -121,7 +117,7 @@ async def test_error_on_wrong_credentials(
"password": "invalid_auth",
}
mock_api.side_effect = None
mock_transmission_client.side_effect = None
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
MOCK_CONFIG_DATA,
@@ -133,12 +129,13 @@ async def test_error_on_wrong_credentials(
("exception", "error"),
[
(TransmissionError, "cannot_connect"),
(TransmissionConnectError, "invalid_auth"),
(TransmissionConnectError, "cannot_connect"),
],
)
async def test_flow_errors(
hass: HomeAssistant,
mock_api: MagicMock,
mock_transmission_client: AsyncMock,
mock_config_entry: MockConfigEntry,
exception: Exception,
error: str,
) -> None:
@@ -147,15 +144,15 @@ async def test_flow_errors(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
mock_api.side_effect = exception
mock_transmission_client.side_effect = exception
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
MOCK_CONFIG_DATA,
)
assert result["type"] is FlowResultType.FORM
assert result["errors"] == {"base": "cannot_connect"}
assert result["errors"] == {"base": error}
mock_api.side_effect = None
mock_transmission_client.side_effect = None
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
MOCK_CONFIG_DATA,
@@ -163,18 +160,21 @@ async def test_flow_errors(
assert result["type"] is FlowResultType.CREATE_ENTRY
async def test_reauth_success(hass: HomeAssistant) -> None:
async def test_reauth_success(
hass: HomeAssistant,
mock_transmission_client: AsyncMock,
mock_config_entry: MockConfigEntry,
) -> None:
"""Test we can reauth."""
entry = MockConfigEntry(domain=transmission.DOMAIN, data=MOCK_CONFIG_DATA)
entry.add_to_hass(hass)
await setup_integration(hass, mock_config_entry)
result = await entry.start_reauth_flow(hass)
result = await mock_config_entry.start_reauth_flow(hass)
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "reauth_confirm"
assert result["description_placeholders"] == {
"username": "user",
"name": "Mock Title",
"name": "Transmission",
}
with patch(
@@ -203,7 +203,8 @@ async def test_reauth_success(hass: HomeAssistant) -> None:
)
async def test_reauth_flow_errors(
hass: HomeAssistant,
mock_api: MagicMock,
mock_config_entry: MockConfigEntry,
mock_transmission_client: AsyncMock,
exception: Exception,
field: str,
error: str,
@@ -224,7 +225,7 @@ async def test_reauth_flow_errors(
"name": "Mock Title",
}
mock_api.side_effect = exception
mock_transmission_client.side_effect = exception
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
@@ -235,7 +236,7 @@ async def test_reauth_flow_errors(
assert result["type"] is FlowResultType.FORM
assert result["errors"] == {field: error}
mock_api.side_effect = None
mock_transmission_client.side_effect = None
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{

View File

@@ -1,7 +1,8 @@
"""Tests for Transmission init."""
from unittest.mock import MagicMock, patch
from unittest.mock import AsyncMock
from freezegun.api import FrozenDateTimeFactory
import pytest
from transmission_rpc.error import (
TransmissionAuthError,
@@ -13,6 +14,7 @@ from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN
from homeassistant.components.transmission.const import (
DEFAULT_PATH,
DEFAULT_SCAN_INTERVAL,
DEFAULT_SSL,
DOMAIN,
)
@@ -21,30 +23,14 @@ from homeassistant.const import CONF_PATH, CONF_SSL
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from . import MOCK_CONFIG_DATA, MOCK_CONFIG_DATA_VERSION_1_1, OLD_MOCK_CONFIG_DATA
from . import MOCK_CONFIG_DATA_VERSION_1_1, OLD_MOCK_CONFIG_DATA
from tests.common import MockConfigEntry
from tests.common import MockConfigEntry, async_fire_time_changed
@pytest.fixture(autouse=True)
def mock_api():
"""Mock an api."""
with patch("transmission_rpc.Client") as api:
yield api
async def test_successful_config_entry(hass: HomeAssistant) -> None:
"""Test settings up integration from config entry."""
entry = MockConfigEntry(domain=DOMAIN, data=MOCK_CONFIG_DATA)
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
assert entry.state is ConfigEntryState.LOADED
async def test_config_flow_entry_migrate_1_1_to_1_2(hass: HomeAssistant) -> None:
async def test_config_flow_entry_migrate_1_1_to_1_2(
hass: HomeAssistant,
) -> None:
"""Test that config flow entry is migrated correctly from v1.1 to v1.2."""
entry = MockConfigEntry(
domain=DOMAIN,
@@ -66,59 +52,65 @@ async def test_config_flow_entry_migrate_1_1_to_1_2(hass: HomeAssistant) -> None
async def test_setup_failed_connection_error(
hass: HomeAssistant, mock_api: MagicMock
hass: HomeAssistant,
mock_transmission_client: AsyncMock,
mock_config_entry: MockConfigEntry,
) -> None:
"""Test integration failed due to connection error."""
mock_config_entry.add_to_hass(hass)
entry = MockConfigEntry(domain=DOMAIN, data=MOCK_CONFIG_DATA)
entry.add_to_hass(hass)
mock_transmission_client.side_effect = TransmissionConnectError()
mock_api.side_effect = TransmissionConnectError()
await hass.config_entries.async_setup(entry.entry_id)
assert entry.state is ConfigEntryState.SETUP_RETRY
await hass.config_entries.async_setup(mock_config_entry.entry_id)
assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY
async def test_setup_failed_auth_error(
hass: HomeAssistant, mock_api: MagicMock
hass: HomeAssistant,
mock_transmission_client: AsyncMock,
mock_config_entry: MockConfigEntry,
) -> None:
"""Test integration failed due to invalid credentials error."""
mock_config_entry.add_to_hass(hass)
entry = MockConfigEntry(domain=DOMAIN, data=MOCK_CONFIG_DATA)
entry.add_to_hass(hass)
mock_transmission_client.side_effect = TransmissionAuthError()
mock_api.side_effect = TransmissionAuthError()
await hass.config_entries.async_setup(entry.entry_id)
assert entry.state is ConfigEntryState.SETUP_ERROR
await hass.config_entries.async_setup(mock_config_entry.entry_id)
assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR
async def test_setup_failed_unexpected_error(
hass: HomeAssistant, mock_api: MagicMock
hass: HomeAssistant,
mock_transmission_client: AsyncMock,
mock_config_entry: MockConfigEntry,
) -> None:
"""Test integration failed due to unexpected error."""
mock_config_entry.add_to_hass(hass)
entry = MockConfigEntry(domain=DOMAIN, data=MOCK_CONFIG_DATA)
entry.add_to_hass(hass)
mock_transmission_client.side_effect = TransmissionError()
mock_api.side_effect = TransmissionError()
await hass.config_entries.async_setup(mock_config_entry.entry_id)
await hass.config_entries.async_setup(entry.entry_id)
assert entry.state is ConfigEntryState.SETUP_ERROR
assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR
async def test_unload_entry(hass: HomeAssistant) -> None:
async def test_unload_entry(
hass: HomeAssistant,
mock_transmission_client: AsyncMock,
mock_config_entry: MockConfigEntry,
) -> None:
"""Test removing integration."""
entry = MockConfigEntry(domain=DOMAIN, data=MOCK_CONFIG_DATA)
entry.add_to_hass(hass)
mock_config_entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
await hass.config_entries.async_setup(mock_config_entry.entry_id)
await hass.async_block_till_done()
assert await hass.config_entries.async_unload(entry.entry_id)
assert mock_config_entry.state is ConfigEntryState.LOADED
assert await hass.config_entries.async_unload(mock_config_entry.entry_id)
await hass.async_block_till_done()
assert entry.state is ConfigEntryState.NOT_LOADED
assert mock_config_entry.state is ConfigEntryState.NOT_LOADED
@pytest.mark.parametrize(
@@ -184,3 +176,28 @@ async def test_migrate_unique_id(
assert migrated_entity
assert migrated_entity.unique_id == new_unique_id
async def test_coordinator_update_error(
hass: HomeAssistant,
mock_transmission_client: AsyncMock,
mock_config_entry: MockConfigEntry,
freezer: FrozenDateTimeFactory,
) -> None:
"""Test the sensors go unavailable."""
mock_config_entry.add_to_hass(hass)
await hass.config_entries.async_setup(mock_config_entry.entry_id)
# Make the coordinator fail on next update
client = mock_transmission_client.return_value
client.session_stats.side_effect = TransmissionError("Connection failed")
# Trigger an update to make entities unavailable
freezer.tick(DEFAULT_SCAN_INTERVAL)
async_fire_time_changed(hass)
await hass.async_block_till_done(wait_background_tasks=True)
# Verify entities are unavailable
state = hass.states.get("sensor.transmission_status")
assert state is not None
assert state.state == "unavailable"

View File

@@ -0,0 +1,27 @@
"""Tests for the Transmission sensor platform."""
from unittest.mock import AsyncMock, patch
from syrupy.assertion import SnapshotAssertion
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from . import setup_integration
from tests.common import MockConfigEntry, snapshot_platform
async def test_sensors(
hass: HomeAssistant,
snapshot: SnapshotAssertion,
entity_registry: er.EntityRegistry,
mock_transmission_client: AsyncMock,
mock_config_entry: MockConfigEntry,
) -> None:
"""Test the sensor entities."""
with patch("homeassistant.components.transmission.PLATFORMS", [Platform.SENSOR]):
await setup_integration(hass, mock_config_entry)
await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id)

View File

@@ -0,0 +1,254 @@
"""Tests for the Transmission services."""
from unittest.mock import AsyncMock, MagicMock, patch
import pytest
from homeassistant.components.transmission.const import (
ATTR_DELETE_DATA,
ATTR_DOWNLOAD_PATH,
ATTR_TORRENT,
CONF_ENTRY_ID,
DOMAIN,
SERVICE_ADD_TORRENT,
SERVICE_REMOVE_TORRENT,
SERVICE_START_TORRENT,
SERVICE_STOP_TORRENT,
)
from homeassistant.config_entries import ConfigEntryState
from homeassistant.const import CONF_ID
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ServiceValidationError
from tests.common import MockConfigEntry
async def test_service_config_entry_not_loaded_state(
hass: HomeAssistant,
mock_transmission_client: AsyncMock,
mock_config_entry: MockConfigEntry,
) -> None:
"""Test service call when config entry is in failed state."""
mock_config_entry.add_to_hass(hass)
assert mock_config_entry.state == ConfigEntryState.NOT_LOADED
with pytest.raises(ServiceValidationError, match="service_not_found"):
await hass.services.async_call(
DOMAIN,
SERVICE_ADD_TORRENT,
{
CONF_ENTRY_ID: mock_config_entry.entry_id,
ATTR_TORRENT: "magnet:?xt=urn:btih:test",
},
blocking=True,
)
async def test_service_integration_not_found(
hass: HomeAssistant,
mock_transmission_client: AsyncMock,
mock_config_entry: MockConfigEntry,
) -> None:
"""Test service call with non-existent config entry."""
mock_config_entry.add_to_hass(hass)
await hass.config_entries.async_setup(mock_config_entry.entry_id)
await hass.async_block_till_done()
with pytest.raises(
ServiceValidationError, match='Integration "transmission" not found'
):
await hass.services.async_call(
DOMAIN,
SERVICE_ADD_TORRENT,
{
CONF_ENTRY_ID: "non_existent_entry_id",
ATTR_TORRENT: "magnet:?xt=urn:btih:test",
},
blocking=True,
)
@pytest.mark.parametrize(
("payload", "expected_torrent", "kwargs"),
[
(
{ATTR_TORRENT: "magnet:?xt=urn:btih:test"},
"magnet:?xt=urn:btih:test",
{},
),
(
{
ATTR_TORRENT: "magnet:?xt=urn:btih:test",
ATTR_DOWNLOAD_PATH: "/custom/path",
},
"magnet:?xt=urn:btih:test",
{"download_dir": "/custom/path"},
),
(
{ATTR_TORRENT: "http://example.com/test.torrent"},
"http://example.com/test.torrent",
{},
),
(
{ATTR_TORRENT: "ftp://example.com/test.torrent"},
"ftp://example.com/test.torrent",
{},
),
(
{ATTR_TORRENT: "/config/test.torrent"},
"/config/test.torrent",
{},
),
],
)
async def test_add_torrent_service_success(
hass: HomeAssistant,
mock_transmission_client: AsyncMock,
mock_config_entry: MockConfigEntry,
payload: dict[str, str],
expected_torrent: str,
kwargs: dict[str, str | None],
) -> None:
"""Test successful torrent addition with url and path sources."""
client = mock_transmission_client.return_value
client.add_torrent.return_value = MagicMock(id=123, name="test_torrent")
mock_config_entry.add_to_hass(hass)
await hass.config_entries.async_setup(mock_config_entry.entry_id)
await hass.async_block_till_done()
full_service_data = {CONF_ENTRY_ID: mock_config_entry.entry_id} | payload
with patch.object(hass.config, "is_allowed_path", return_value=True):
await hass.services.async_call(
DOMAIN,
SERVICE_ADD_TORRENT,
full_service_data,
blocking=True,
)
client.add_torrent.assert_called_once_with(expected_torrent, **kwargs)
async def test_add_torrent_service_invalid_path(
hass: HomeAssistant,
mock_transmission_client: AsyncMock,
mock_config_entry: MockConfigEntry,
) -> None:
"""Test torrent addition with invalid path."""
mock_config_entry.add_to_hass(hass)
await hass.config_entries.async_setup(mock_config_entry.entry_id)
await hass.async_block_till_done()
with pytest.raises(ServiceValidationError, match="Could not add torrent"):
await hass.services.async_call(
DOMAIN,
SERVICE_ADD_TORRENT,
{
CONF_ENTRY_ID: mock_config_entry.entry_id,
ATTR_TORRENT: "/etc/bad.torrent",
},
blocking=True,
)
async def test_start_torrent_service_success(
hass: HomeAssistant,
mock_transmission_client: AsyncMock,
mock_config_entry: MockConfigEntry,
) -> None:
"""Test successful torrent start."""
client = mock_transmission_client.return_value
mock_config_entry.add_to_hass(hass)
await hass.config_entries.async_setup(mock_config_entry.entry_id)
await hass.async_block_till_done()
await hass.services.async_call(
DOMAIN,
SERVICE_START_TORRENT,
{
CONF_ENTRY_ID: mock_config_entry.entry_id,
CONF_ID: 123,
},
blocking=True,
)
client.start_torrent.assert_called_once_with(123)
async def test_stop_torrent_service_success(
hass: HomeAssistant,
mock_transmission_client: AsyncMock,
mock_config_entry: MockConfigEntry,
) -> None:
"""Test successful torrent stop."""
client = mock_transmission_client.return_value
mock_config_entry.add_to_hass(hass)
await hass.config_entries.async_setup(mock_config_entry.entry_id)
await hass.async_block_till_done()
await hass.services.async_call(
DOMAIN,
SERVICE_STOP_TORRENT,
{
CONF_ENTRY_ID: mock_config_entry.entry_id,
CONF_ID: 456,
},
blocking=True,
)
client.stop_torrent.assert_called_once_with(456)
async def test_remove_torrent_service_success(
hass: HomeAssistant,
mock_transmission_client: AsyncMock,
mock_config_entry: MockConfigEntry,
) -> None:
"""Test successful torrent removal without deleting data."""
client = mock_transmission_client.return_value
mock_config_entry.add_to_hass(hass)
await hass.config_entries.async_setup(mock_config_entry.entry_id)
await hass.async_block_till_done()
await hass.services.async_call(
DOMAIN,
SERVICE_REMOVE_TORRENT,
{
CONF_ENTRY_ID: mock_config_entry.entry_id,
CONF_ID: 789,
},
blocking=True,
)
client.remove_torrent.assert_called_once_with(789, delete_data=False)
async def test_remove_torrent_service_with_delete_data(
hass: HomeAssistant,
mock_transmission_client: AsyncMock,
mock_config_entry: MockConfigEntry,
) -> None:
"""Test successful torrent removal with deleting data."""
client = mock_transmission_client.return_value
mock_config_entry.add_to_hass(hass)
await hass.config_entries.async_setup(mock_config_entry.entry_id)
await hass.async_block_till_done()
await hass.services.async_call(
DOMAIN,
SERVICE_REMOVE_TORRENT,
{
CONF_ENTRY_ID: mock_config_entry.entry_id,
CONF_ID: 789,
ATTR_DELETE_DATA: True,
},
blocking=True,
)
client.remove_torrent.assert_called_once_with(789, delete_data=True)

View File

@@ -0,0 +1,131 @@
"""Tests for the Transmission switch platform."""
from unittest.mock import AsyncMock, patch
import pytest
from syrupy.assertion import SnapshotAssertion
from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN
from homeassistant.const import (
ATTR_ENTITY_ID,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
Platform,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from . import setup_integration
from tests.common import MockConfigEntry, snapshot_platform
async def test_switches(
hass: HomeAssistant,
snapshot: SnapshotAssertion,
entity_registry: er.EntityRegistry,
mock_transmission_client: AsyncMock,
mock_config_entry: MockConfigEntry,
) -> None:
"""Test the switch entities."""
with patch("homeassistant.components.transmission.PLATFORMS", [Platform.SWITCH]):
await setup_integration(hass, mock_config_entry)
await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id)
@pytest.mark.parametrize(
("service", "api_method"),
[
(SERVICE_TURN_ON, "start_all"),
(SERVICE_TURN_OFF, "stop_torrent"),
],
)
async def test_on_off_switch_without_torrents(
hass: HomeAssistant,
mock_transmission_client: AsyncMock,
mock_config_entry: MockConfigEntry,
mock_torrent,
service: str,
api_method: str,
) -> None:
"""Test on/off switch."""
client = mock_transmission_client.return_value
client.get_torrents.return_value = []
mock_config_entry.add_to_hass(hass)
await hass.config_entries.async_setup(mock_config_entry.entry_id)
await hass.async_block_till_done()
await hass.services.async_call(
SWITCH_DOMAIN,
service,
{ATTR_ENTITY_ID: "switch.transmission_switch"},
blocking=True,
)
getattr(client, api_method).assert_not_called()
@pytest.mark.parametrize(
("service", "api_method"),
[
(SERVICE_TURN_ON, "start_all"),
(SERVICE_TURN_OFF, "stop_torrent"),
],
)
async def test_on_off_switch_with_torrents(
hass: HomeAssistant,
mock_transmission_client: AsyncMock,
mock_config_entry: MockConfigEntry,
mock_torrent,
service: str,
api_method: str,
) -> None:
"""Test on/off switch."""
client = mock_transmission_client.return_value
client.get_torrents.return_value = [mock_torrent()]
mock_config_entry.add_to_hass(hass)
await hass.config_entries.async_setup(mock_config_entry.entry_id)
await hass.async_block_till_done()
await hass.services.async_call(
SWITCH_DOMAIN,
service,
{ATTR_ENTITY_ID: "switch.transmission_switch"},
blocking=True,
)
getattr(client, api_method).assert_called_once()
@pytest.mark.parametrize(
("service", "alt_speed_enabled"),
[
(SERVICE_TURN_ON, True),
(SERVICE_TURN_OFF, False),
],
)
async def test_turtle_mode_switch(
hass: HomeAssistant,
mock_transmission_client: AsyncMock,
mock_config_entry: MockConfigEntry,
service: str,
alt_speed_enabled: bool,
) -> None:
"""Test turtle mode switch."""
client = mock_transmission_client.return_value
mock_config_entry.add_to_hass(hass)
await hass.config_entries.async_setup(mock_config_entry.entry_id)
await hass.async_block_till_done()
await hass.services.async_call(
SWITCH_DOMAIN,
service,
{ATTR_ENTITY_ID: "switch.transmission_turtle_mode"},
blocking=True,
)
client.set_session.assert_called_once_with(alt_speed_enabled=alt_speed_enabled)

View File

@@ -334,6 +334,35 @@ async def test_refresh_no_update_method(
await crd.async_refresh()
async def test_refresh_cancelled(
hass: HomeAssistant,
crd: update_coordinator.DataUpdateCoordinator[int],
) -> None:
"""Test that we don't swallow cancellation."""
await crd.async_refresh()
start = asyncio.Event()
abort = asyncio.Event()
async def _update() -> bool:
start.set()
await abort.wait()
return True
crd.update_method = _update
crd.last_update_success = True
task = hass.async_create_task(crd.async_refresh())
await start.wait()
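# Cancel the in-flight refresh; the CancelledError should propagate to the awaiter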
task.cancel()
with pytest.raises(asyncio.CancelledError):
await task
abort.set()
assert crd.last_update_success is False
async def test_update_interval(
hass: HomeAssistant,
freezer: FrozenDateTimeFactory,

View File

@@ -9727,3 +9727,31 @@ async def test_config_flow_create_entry_with_next_flow(hass: HomeAssistant) -> N
assert result["next_flow"][0] == config_entries.FlowType.CONFIG_FLOW
# Verify the target flow exists
hass.config_entries.flow.async_get(result["next_flow"][1])
async def test_canceled_exceptions_are_propagated(
hass: HomeAssistant, manager: config_entries.ConfigEntries
) -> None:
"""Tests that base exceptions like cancellations are not swallowed."""
entry = MockConfigEntry(title="test_title", domain="test")
start = asyncio.Event()
abort = asyncio.Event()
async def _setup(_: HomeAssistant, __: ConfigEntry) -> bool:
start.set()
await abort.wait()
return True
mock_integration(hass, MockModule("test", async_setup_entry=_setup))
mock_platform(hass, "test.config_flow", None)
entry.add_to_hass(hass)
task = hass.async_create_task(manager.async_setup(entry.entry_id))
await start.wait()
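# Cancel the setup task mid-flight; the cancellation should propagate instead of being swallowed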
task.cancel()
with pytest.raises(asyncio.CancelledError):
await task
abort.set()

View File

@@ -13,6 +13,5 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> None:
"""Mock an unsuccessful entry setup."""
asyncio.current_task().cancel()
await asyncio.sleep(0)
"""Mock an leaked cancellation, without our own task being cancelled."""
raise asyncio.CancelledError

View File

@@ -1054,6 +1054,12 @@ _CONVERTED_VALUE: dict[
10,
UnitOfVolumeFlowRate.LITERS_PER_SECOND,
),
(
24,
UnitOfVolumeFlowRate.GALLONS_PER_DAY,
1,
UnitOfVolumeFlowRate.GALLONS_PER_HOUR,
),
],
}